import pandas as pd
import numpy as np
import seaborn as sns
import plotly.express as px
import matplotlib.pyplot as plt
import plotly.graph_objects as go
import category_encoders as ce
from sklearn.preprocessing import LabelEncoder
from imblearn.over_sampling import SMOTE
from plotly.offline import init_notebook_mode
init_notebook_mode(connected=True)
# Reading data
data = pd.read_csv("Bus_Breakdown_and_Delays.csv")
data.shape
(147972, 21)
data.head()
| School_Year | Busbreakdown_ID | Run_Type | Bus_No | Route_Number | Reason | Schools_Serviced | Occurred_On | Created_On | Boro | ... | How_Long_Delayed | Number_Of_Students_On_The_Bus | Has_Contractor_Notified_Schools | Has_Contractor_Notified_Parents | Have_You_Alerted_OPT | Informed_On | Incident_Number | Last_Updated_On | Breakdown_or_Running_Late | School_Age_or_PreK | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2015-2016 | 1224901 | Pre-K/EI | 811 | 1 | Other | C353 | 10/26/2015 08:30:00 AM | 10/26/2015 08:40:00 AM | Bronx | ... | 10MINUTES | 5 | Yes | Yes | No | 10/26/2015 08:40:00 AM | NaN | 10/26/2015 08:40:39 AM | Running Late | Pre-K |
| 1 | 2015-2016 | 1225098 | Pre-K/EI | 9302 | 1 | Heavy Traffic | C814 | 10/27/2015 07:10:00 AM | 10/27/2015 07:11:00 AM | Bronx | ... | 25 MINUTES | 3 | Yes | Yes | No | 10/27/2015 07:11:00 AM | NaN | 10/27/2015 07:11:22 AM | Running Late | Pre-K |
| 2 | 2015-2016 | 1215800 | Pre-K/EI | 358 | 2 | Heavy Traffic | C195 | 09/18/2015 07:36:00 AM | 09/18/2015 07:38:00 AM | Bronx | ... | 15 MINUTES | 12 | Yes | Yes | Yes | 09/18/2015 07:38:00 AM | NaN | 09/18/2015 07:38:44 AM | Running Late | Pre-K |
| 3 | 2015-2016 | 1215511 | Pre-K/EI | 331 | 2 | Other | C178 | 09/17/2015 08:08:00 AM | 09/17/2015 08:12:00 AM | Bronx | ... | 10 minutes | 11 | Yes | Yes | Yes | 09/17/2015 08:12:00 AM | NaN | 09/17/2015 08:12:08 AM | Running Late | Pre-K |
| 4 | 2015-2016 | 1215828 | Pre-K/EI | 332 | 2 | Other | S176 | 09/18/2015 07:39:00 AM | 09/18/2015 07:45:00 AM | Bronx | ... | 10MINUTES | 12 | Yes | Yes | No | 09/18/2015 07:45:00 AM | NaN | 09/18/2015 07:56:40 AM | Running Late | Pre-K |
5 rows × 21 columns
data.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 147972 entries, 0 to 147971 Data columns (total 21 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 School_Year 147972 non-null object 1 Busbreakdown_ID 147972 non-null int64 2 Run_Type 147970 non-null object 3 Bus_No 147972 non-null object 4 Route_Number 147972 non-null object 5 Reason 147972 non-null object 6 Schools_Serviced 147972 non-null object 7 Occurred_On 147972 non-null object 8 Created_On 147972 non-null object 9 Boro 141654 non-null object 10 Bus_Company_Name 147972 non-null object 11 How_Long_Delayed 126342 non-null object 12 Number_Of_Students_On_The_Bus 147972 non-null int64 13 Has_Contractor_Notified_Schools 147972 non-null object 14 Has_Contractor_Notified_Parents 147972 non-null object 15 Have_You_Alerted_OPT 147972 non-null object 16 Informed_On 147972 non-null object 17 Incident_Number 5632 non-null object 18 Last_Updated_On 147972 non-null object 19 Breakdown_or_Running_Late 147972 non-null object 20 School_Age_or_PreK 147972 non-null object dtypes: int64(2), object(19) memory usage: 23.7+ MB
# Missing values %
missing_values = data.isna().sum()
missing_values= missing_values/data.shape[0] *100
missing_values
School_Year 0.000000 Busbreakdown_ID 0.000000 Run_Type 0.001352 Bus_No 0.000000 Route_Number 0.000000 Reason 0.000000 Schools_Serviced 0.000000 Occurred_On 0.000000 Created_On 0.000000 Boro 4.269727 Bus_Company_Name 0.000000 How_Long_Delayed 14.617630 Number_Of_Students_On_The_Bus 0.000000 Has_Contractor_Notified_Schools 0.000000 Has_Contractor_Notified_Parents 0.000000 Have_You_Alerted_OPT 0.000000 Informed_On 0.000000 Incident_Number 96.193875 Last_Updated_On 0.000000 Breakdown_or_Running_Late 0.000000 School_Age_or_PreK 0.000000 dtype: float64
# plotting missing values %
px.bar(missing_values,title="Missing Percentage",color = missing_values.index)
# Data distribution of run type
px.pie(data,names="Run_Type")
df = data.groupby(['School_Year', 'Breakdown_or_Running_Late']).size()
df = pd.DataFrame(df)
df = df.reset_index()
px.bar(x=df.School_Year,y=df[0],color=df.Breakdown_or_Running_Late,barmode="group",title="Delay analyis per year")
breakdown = pd.DataFrame(data.groupby(["Breakdown_or_Running_Late"]))[1][0]
running_late = pd.DataFrame(data.groupby(["Breakdown_or_Running_Late"]))[1][1]
breakdown.Route_Number.value_counts()[:10]
px.bar(breakdown.Route_Number.value_counts()[:10],color=breakdown.Route_Number.value_counts()[:10].values,title="Routes with maximum breakdown")
running_late.Route_Number.value_counts()[:10]
px.bar(running_late.Route_Number.value_counts()[:10],color=running_late.Route_Number.value_counts()[:10].values,title="Routes having maximum bus delays")
px.bar(data[data["Reason"] == "Heavy Traffic"].Route_Number.value_counts()[:20],title="Routes with high traffic")
not_bus_issue = ["Heavy Traffic","Weather Conditions","Delayed by School","Late return from Field Trip"]
# Getting data that is not present in not_bus_issue
df = data.query("Reason not in @not_bus_issue")
px.bar(df.Bus_No.value_counts()[0:50],color=df.Bus_No.value_counts()[0:50].values,title="Bus Encountring more mechanical faliures")
df = data.groupby(['School_Year', 'Schools_Serviced']).size()
df = pd.DataFrame(df)
df = df.reset_index()
px.bar(df.School_Year.value_counts(),color=df.School_Year.value_counts().values,title="Schools serviced Per Year")
not_bus_issue = ["Heavy Traffic","Weather Conditions","Delayed by School","Late return from Field Trip"]
df = data.query("Reason not in @not_bus_issue")
px.bar(df.Schools_Serviced.value_counts()[:25],color=df.Schools_Serviced.value_counts().values[:25],title="Schools that had poor transortation facility")
# Converting feature to datetime
data.Occurred_On = pd.to_datetime(data.Occurred_On)
# Get month names and plot delays per month
df = data.Occurred_On.dt.month_name().value_counts()
fig = go.Figure(data=go.Scatter(
x=df.index,
y=df.values,
mode='lines+markers',
marker=dict(size=df.values/500,
color=df.values)
))
fig.update_layout(
title="Number of delays per month")
fig.show()
# Get hourly delay
df = data.Occurred_On.dt.hour.value_counts()
px.scatter(df,color=df.values,size=df.values,hover_name=df.index,title="Delayes reported per hour")
# Converting to date time
data.Created_On = pd.to_datetime(data.Created_On)
# Get time gap between occurance and reported delay
data["time_gap"] = (data.Created_On-data.Occurred_On).astype('timedelta64[h]').values
df = data.loc[data['time_gap']!=data['time_gap'].max()]
df = df.loc[df['time_gap']!=df['time_gap'].min()]
px.box(x=df.time_gap.values,title="Time Delay(Minutes) between delay reported and delay creation")
not_bus_issue = ["Heavy Traffic","Weather Conditions","Delayed by School","Late return from Field Trip"]
df = data.query("Reason not in @not_bus_issue")
df = df.Bus_Company_Name.value_counts()
fig = go.Figure(data=go.Scatter(
x=df.index[:25],
y=df.values[:25],
mode='lines+markers',
marker=dict(size=df.values/100,
color=df.values)
))
fig.update_layout(
title="Top 25 companies with maximum mechanical issues")
fig.show()
df = data.groupby(['Boro', 'Breakdown_or_Running_Late']).size()
df = pd.DataFrame(df)
df = df.reset_index()
px.bar(x=df.Boro,y=df[0],color=df.Breakdown_or_Running_Late,barmode="group",title="Delay analysis per city")
px.pie(data,names="Boro",title="Delay chances per city")
import re
# Get delay in miinutes from How_Long_Delayed by applying regex expression
delayed_time = []
for x in data.How_Long_Delayed :
if x is not np.nan:
if re.findall(r"^[0-9]+",x):
delayed_time+= re.findall(r"^[0-9]+",x)
else:
delayed_time+=[0]
else:
delayed_time+=[np.nan]
data.How_Long_Delayed = delayed_time
# Get distribution of delay
px.box(data.How_Long_Delayed.dropna().astype(np.int64),title="Average delay")
px.pie(data,names="Has_Contractor_Notified_Parents",title="Has_Contractor_Notified_Parents")
df = data[["Occurred_On","Number_Of_Students_On_The_Bus"]]
df["month"] = data["Occurred_On"].dt.month_name()
# Sum of students per month in the bus
df = df.groupby(["month"]).Number_Of_Students_On_The_Bus.sum()
fig = go.Figure(data=go.Scatter(
x=df.index,
y=df.values,
mode='lines+markers',
marker=dict(size=df.values/1000,
color=df.values)
))
fig.update_layout(
title="Students lives Endangered per month")
fig.show()
<ipython-input-45-08b8c8ec9e85>:2: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
# Distribution of label
plt.title("Breakdown_or_Running_Late")
plt.pie(data.Breakdown_or_Running_Late.value_counts(),explode=[0,0.2],autopct="%.2f",labels=data.Breakdown_or_Running_Late.value_counts().index,radius=2)
plt.show()
# Extracting features month name and hour of day
data["month"] = data.Occurred_On.dt.month
data["time"] = data.Occurred_On.dt.hour
# Drpping irrelevant features
data.drop(["School_Year","Busbreakdown_ID","Schools_Serviced","Occurred_On","Created_On","Informed_On","Incident_Number","Last_Updated_On"],axis=1,inplace=True)
# Imputing nan values with mean of feature How_Long_Delayed
data.How_Long_Delayed.fillna("mean",inplace=True)
# Drop NULL values
data.dropna(inplace=True)
# Some values are useless so we will remove those values
remove_Run_Type = ["Project Read PM Run","Project Read AM Run","Project Read Field Trip"]
data = data.query("Reason not in @not_bus_issue")
# Label Encoding features
label_enc = LabelEncoder()
data.Has_Contractor_Notified_Schools = label_enc.fit_transform(data.Has_Contractor_Notified_Schools)
label_enc = LabelEncoder()
data.Has_Contractor_Notified_Parents = label_enc.fit_transform(data.Has_Contractor_Notified_Parents)
label_enc = LabelEncoder()
data.Have_You_Alerted_OPT = label_enc.fit_transform(data.Have_You_Alerted_OPT)
label_enc = LabelEncoder()
data.School_Age_or_PreK = label_enc.fit_transform(data.School_Age_or_PreK)
label_enc = LabelEncoder()
data.Breakdown_or_Running_Late = label_enc.fit_transform(data.Breakdown_or_Running_Late)
#heat map using Pearson's coefficient
plt.figure(figsize=(16, 6))
sns.heatmap(data.corr(), annot=True)
plt.title('Correlation Heatmap', fontdict={'fontsize':12}, pad=12);
# Hash Encoding features that have multiple distinct values
hash_enc=ce.HashingEncoder(cols='Bus_No',n_components=10)
data = hash_enc.fit_transform(data)
hash_enc=ce.HashingEncoder(cols='Route_Number',n_components=10)
data = hash_enc.fit_transform(data)
hash_enc=ce.HashingEncoder(cols='Bus_Company_Name',n_components=10)
data = hash_enc.fit_transform(data)
# Getting label
Y = data.Breakdown_or_Running_Late
Y.value_counts()
1 33245 0 16308 Name: Breakdown_or_Running_Late, dtype: int64
px.pie(names=Y.astype(str),title="Labels Before Balancing")
# Getting Feature
X = data.drop("Breakdown_or_Running_Late",axis=1)
X = pd.get_dummies(X)
oversample = SMOTE()
X, Y = oversample.fit_resample(X.values, Y.values)
print("Final Data Shape : ",X.shape)
Final Data Shape : (66490, 175)
plt.title("Balanced Labels")
plt.pie(pd.Series(Y).value_counts(),autopct="%.2f",labels=data.Breakdown_or_Running_Late.value_counts().index,radius=2)
plt.show()
from sklearn.decomposition import PCA
from sklearn.metrics import roc_curve
from sklearn.metrics import auc
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from xgboost import XGBClassifier
from catboost import CatBoostClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score,precision_score,recall_score
# Splitting data in ration 70:30
X_train,X_test,Y_train,Y_test = train_test_split(X,Y,test_size=0.3)
print("Train Data : ",X_train.shape)
print("Test Data : ",X_test.shape)
Train Data : (46543, 175) Test Data : (19947, 175)
# Applying PCA to reduce features
pca = PCA(n_components=4)
X_train = pca.fit_transform(X_train)
X_test = pca.transform(X_test)
print("Train Data : ",X_train.shape)
print("Test Data : ",X_test.shape)
Train Data : (46543, 4) Test Data : (19947, 4)
index = ["LogisticRegression","DecisionTreeClassifier","RandomForestClassifier","XGBClassifier","CatBoostClassifier"]
results = pd.DataFrame(columns=['Accuracy','Precison','Recall'],index=index)
log_reg = LogisticRegression(n_jobs=-1)
log_param_grid = [{"max_iter":[50,100,200],"tol" : [0.001,0.0001,0.01]}]
model_log = GridSearchCV(log_reg,log_param_grid,cv=10,scoring="accuracy")
model_log.fit(X_train,Y_train)
GridSearchCV(cv=10, estimator=LogisticRegression(n_jobs=-1),
param_grid=[{'max_iter': [50, 100, 200],
'tol': [0.001, 0.0001, 0.01]}],
scoring='accuracy')
model_log = LogisticRegression(**model_log.best_params_)
model_log.fit(X_train,Y_train)
pred_ = model_log.predict(X_test)
acc = accuracy_score(Y_test,pred_)
prec = precision_score(Y_test,pred_,average="weighted")
rec = recall_score(Y_test,pred_,average="weighted")
print("Accuracy :: ",acc)
print("Precision :: ",prec)
print("Recall :: ",rec)
results.loc["LogisticRegression"] = [acc,prec,rec]
Accuracy :: 0.6143781019702211 Precision :: 0.6322171132776994 Recall :: 0.6143781019702211
tree_clf = DecisionTreeClassifier()
tree_param_grid = [{"max_depth":[5,10,50,100],"max_leaf_nodes":list(range(2,10,2))}]
model_tree = GridSearchCV(tree_clf,tree_param_grid,cv=10,scoring="accuracy")
model_tree.fit(X_train,Y_train)
GridSearchCV(cv=10, estimator=DecisionTreeClassifier(),
param_grid=[{'max_depth': [5, 10, 50, 100],
'max_leaf_nodes': [2, 4, 6, 8]}],
scoring='accuracy')
tree_clf = DecisionTreeClassifier(**model_tree.best_params_)
tree_clf.fit(X_train,Y_train)
pred_ = tree_clf.predict(X_test)
acc = accuracy_score(Y_test,pred_)
prec = precision_score(Y_test,pred_,average="weighted")
rec = recall_score(Y_test,pred_,average="weighted")
print("Accuracy :: ",acc)
print("Precision :: ",prec)
print("Recall :: ",rec)
results.loc["DecisionTreeClassifier"] = [acc,prec,rec]
Accuracy :: 0.6324259287110844 Precision :: 0.6465071632824431 Recall :: 0.6324259287110844
forest_clf = RandomForestClassifier(n_jobs=-1)
forest_param_grid = [{"n_estimators":[50,100,200,250],"max_depth":[5,10,25,50],"max_features":[1,2,3,4]}]
model_forest = GridSearchCV(forest_clf,forest_param_grid,cv=10,scoring="accuracy")
model_forest.fit(X_train,Y_train)
GridSearchCV(cv=10, estimator=RandomForestClassifier(n_jobs=-1),
param_grid=[{'max_depth': [5, 10, 25, 50],
'max_features': [1, 2, 3, 4],
'n_estimators': [50, 100, 200, 250]}],
scoring='accuracy')
model_forest = RandomForestClassifier(**model_forest.best_params_)
model_forest.fit(X_train,Y_train)
pred_ = model_forest.predict(X_test)
acc = accuracy_score(Y_test,pred_)
prec = precision_score(Y_test,pred_,average="weighted")
rec = recall_score(Y_test,pred_,average="weighted")
print("Accuracy :: ",acc)
print("Precision :: ",prec)
print("Recall :: ",rec)
results.loc["RandomForestClassifier"] = [acc,prec,rec]
Accuracy :: 0.9138717601644357 Precision :: 0.9139496700151456 Recall :: 0.9138717601644357
xgb_clf = XGBClassifier(n_jobs=-1)
xgb_param_grid = [{"learning_rate":[0.1,0.01,0.001],"max_depth":[5,10,25,50]}]
model_xgb = GridSearchCV(xgb_clf,xgb_param_grid,cv=10,scoring="accuracy")
model_xgb.fit(X_train,Y_train)
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:03] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:05] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:07] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:10] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:12] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:14] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:17] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:19] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:21] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:24] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:29] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:33] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:38] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:43] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:48] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:53] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:03:58] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:04:03] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:04:07] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:04:12] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:04:23] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:04:34] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:04:45] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:04:55] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:05:06] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:05:17] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:05:28] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:05:39] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:05:49] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:06:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:06:18] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:06:35] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:06:51] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:07:08] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:07:25] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:07:41] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:07:58] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:08:15] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:08:32] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:08:48] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:08:51] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:08:53] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:08:55] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:08:58] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:03] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:05] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:07] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:10] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:12] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:17] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:22] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:27] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:32] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:37] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:42] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:47] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:52] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:09:57] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:10:02] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:10:12] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:10:24] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:10:34] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:10:45] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:10:56] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:11:06] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:11:17] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:11:27] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:11:38] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:11:49] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:12:01] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:12:13] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:12:25] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:12:37] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:12:50] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:02] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:14] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:26] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:38] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:51] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:53] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:55] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:13:58] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:03] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:05] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:07] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:10] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:12] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:15] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:20] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:25] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:30] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:35] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:40] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:45] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:50] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:14:55] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:15:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:15:05] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:15:15] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:15:25] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:15:35] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:15:46] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:15:57] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:16:07] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:16:17] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:16:28] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:16:38] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:16:49] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:17:01] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:17:12] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:17:24] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:17:36] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:17:48] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:18:00] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:18:12] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:18:24] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:18:36] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
C:\Users\soura\anaconda3\lib\site-packages\xgboost\sklearn.py:1224: UserWarning: The use of label encoder in XGBClassifier is deprecated and will be removed in a future release. To remove this warning, do the following: 1) Pass option use_label_encoder=False when constructing XGBClassifier object; and 2) Encode your labels (y) as integers starting with 0, i.e. 0, 1, 2, ..., [num_class - 1].
[23:18:47] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior.
GridSearchCV(cv=10,
estimator=XGBClassifier(base_score=None, booster=None,
colsample_bylevel=None,
colsample_bynode=None,
colsample_bytree=None,
enable_categorical=False, gamma=None,
gpu_id=None, importance_type=None,
interaction_constraints=None,
learning_rate=None, max_delta_step=None,
max_depth=None, min_child_weight=None,
missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=-1,
num_parallel_tree=None, predictor=None,
random_state=None, reg_alpha=None,
reg_lambda=None, scale_pos_weight=None,
subsample=None, tree_method=None,
validate_parameters=None, verbosity=None),
param_grid=[{'learning_rate': [0.1, 0.01, 0.001],
'max_depth': [5, 10, 25, 50]}],
scoring='accuracy')
model_xgb = XGBClassifier(**model_xgb.best_params_)
model_xgb.fit(X_train,Y_train)
pred_ = model_xgb.predict(X_test)
acc = accuracy_score(Y_test,pred_)
prec = precision_score(Y_test,pred_,average="weighted")
rec = recall_score(Y_test,pred_,average="weighted")
print("Accuracy :: ",acc)
print("Precision :: ",prec)
print("Recall :: ",rec)
results.loc["XGBClassifier"] = [acc,prec,rec]
[23:19:06] WARNING: C:/Users/Administrator/workspace/xgboost-win64_release_1.5.1/src/learner.cc:1115: Starting in XGBoost 1.3.0, the default evaluation metric used with the objective 'binary:logistic' was changed from 'error' to 'logloss'. Explicitly set eval_metric if you'd like to restore the old behavior. Accuracy :: 0.9129693688273925 Precision :: 0.9130361946727522 Recall :: 0.9129693688273925
cat_clf = CatBoostClassifier()
cat_param_grid = [{"n_estimators":[10,25,50],"learning_rate":[0.1,0.01,0.001]}]
model_cat = GridSearchCV(cat_clf,cat_param_grid,cv=10,scoring="accuracy")
model_cat.fit(X_train,Y_train)
0: learn: 0.6845521 total: 187ms remaining: 1.68s 1: learn: 0.6775051 total: 192ms remaining: 769ms 2: learn: 0.6716113 total: 197ms remaining: 460ms 3: learn: 0.6662326 total: 202ms remaining: 303ms 4: learn: 0.6606733 total: 206ms remaining: 206ms 5: learn: 0.6565534 total: 211ms remaining: 141ms 6: learn: 0.6529964 total: 216ms remaining: 92.5ms 7: learn: 0.6500911 total: 221ms remaining: 55.2ms 8: learn: 0.6466694 total: 226ms remaining: 25.1ms 9: learn: 0.6441484 total: 230ms remaining: 0us 0: learn: 0.6837951 total: 4.89ms remaining: 44.1ms 1: learn: 0.6767637 total: 9.95ms remaining: 39.8ms 2: learn: 0.6709601 total: 14.9ms remaining: 34.7ms 3: learn: 0.6664329 total: 19.8ms remaining: 29.7ms 4: learn: 0.6607243 total: 24.7ms remaining: 24.7ms 5: learn: 0.6557170 total: 29.5ms remaining: 19.6ms 6: learn: 0.6520528 total: 33.8ms remaining: 14.5ms 7: learn: 0.6490042 total: 38.7ms remaining: 9.67ms 8: learn: 0.6455551 total: 43.5ms remaining: 4.83ms 9: learn: 0.6427830 total: 48.2ms remaining: 0us 0: learn: 0.6840213 total: 5.03ms remaining: 45.3ms 1: learn: 0.6769563 total: 10ms remaining: 40ms 2: learn: 0.6704229 total: 14.7ms remaining: 34.3ms 3: learn: 0.6659820 total: 19.3ms remaining: 29ms 4: learn: 0.6608086 total: 23.8ms remaining: 23.8ms 5: learn: 0.6568028 total: 28.6ms remaining: 19.1ms 6: learn: 0.6521713 total: 33.4ms remaining: 14.3ms 7: learn: 0.6487712 total: 38.2ms remaining: 9.55ms 8: learn: 0.6462796 total: 43.2ms remaining: 4.79ms 9: learn: 0.6437662 total: 47.7ms remaining: 0us 0: learn: 0.6841860 total: 5ms remaining: 45ms 1: learn: 0.6761862 total: 10.1ms remaining: 40.5ms 2: learn: 0.6702003 total: 14.9ms remaining: 34.9ms 3: learn: 0.6648719 total: 19.8ms remaining: 29.7ms 4: learn: 0.6595880 total: 24.2ms remaining: 24.2ms 5: learn: 0.6555562 total: 29.1ms remaining: 19.4ms 6: learn: 0.6520243 total: 33.9ms remaining: 14.5ms 7: learn: 0.6488524 total: 38.8ms remaining: 9.7ms 8: learn: 0.6456400 total: 43.6ms remaining: 4.84ms 9: learn: 0.6427984 total: 48.4ms remaining: 0us 0: learn: 0.6834265 total: 5.05ms remaining: 45.5ms 1: learn: 0.6759055 total: 10.1ms remaining: 40.4ms 2: learn: 0.6698694 total: 14.9ms remaining: 34.8ms 3: learn: 0.6642132 total: 19.7ms remaining: 29.5ms 4: learn: 0.6589228 total: 24.5ms remaining: 24.5ms 5: learn: 0.6546309 total: 29.3ms remaining: 19.6ms 6: learn: 0.6511029 total: 34.1ms remaining: 14.6ms 7: learn: 0.6475361 total: 38.8ms remaining: 9.71ms 8: learn: 0.6442616 total: 43.7ms remaining: 4.86ms 9: learn: 0.6416422 total: 48.5ms remaining: 0us 0: learn: 0.6842268 total: 4.98ms remaining: 44.8ms 1: learn: 0.6760565 total: 10.2ms remaining: 40.7ms 2: learn: 0.6701379 total: 15.2ms remaining: 35.4ms 3: learn: 0.6652193 total: 20.1ms remaining: 30.1ms 4: learn: 0.6598572 total: 25ms remaining: 25ms 5: learn: 0.6557990 total: 29.8ms remaining: 19.8ms 6: learn: 0.6520352 total: 34.4ms remaining: 14.7ms 7: learn: 0.6487328 total: 39.1ms remaining: 9.78ms 8: learn: 0.6451650 total: 44.1ms remaining: 4.89ms 9: learn: 0.6424713 total: 48.5ms remaining: 0us 0: learn: 0.6834081 total: 4.76ms remaining: 42.9ms 1: learn: 0.6761479 total: 9.89ms remaining: 39.6ms 2: learn: 0.6702549 total: 14.9ms remaining: 34.7ms 3: learn: 0.6646763 total: 19.9ms remaining: 29.8ms 4: learn: 0.6595166 total: 24.3ms remaining: 24.3ms 5: learn: 0.6554302 total: 29.1ms remaining: 19.4ms 6: learn: 0.6516216 total: 33.7ms remaining: 14.4ms 7: learn: 0.6483806 total: 38.5ms remaining: 9.61ms 8: learn: 0.6450570 total: 43.4ms remaining: 4.83ms 9: learn: 0.6425214 total: 48.1ms remaining: 0us 0: learn: 0.6835852 total: 4.96ms remaining: 44.6ms 1: learn: 0.6766025 total: 9.72ms remaining: 38.9ms 2: learn: 0.6701163 total: 14.6ms remaining: 34.1ms 3: learn: 0.6646611 total: 19.4ms remaining: 29.1ms 4: learn: 0.6594722 total: 24.1ms remaining: 24.1ms 5: learn: 0.6553730 total: 29.1ms remaining: 19.4ms 6: learn: 0.6514609 total: 33.9ms remaining: 14.5ms 7: learn: 0.6477935 total: 38.8ms remaining: 9.7ms 8: learn: 0.6441283 total: 43.9ms remaining: 4.88ms 9: learn: 0.6413718 total: 48.6ms remaining: 0us 0: learn: 0.6837982 total: 4.89ms remaining: 44ms 1: learn: 0.6765593 total: 9.6ms remaining: 38.4ms 2: learn: 0.6705135 total: 14.1ms remaining: 32.8ms 3: learn: 0.6652650 total: 18.9ms remaining: 28.3ms 4: learn: 0.6599831 total: 23.6ms remaining: 23.6ms 5: learn: 0.6559021 total: 28.2ms remaining: 18.8ms 6: learn: 0.6525644 total: 33ms remaining: 14.2ms 7: learn: 0.6488499 total: 37.8ms remaining: 9.44ms 8: learn: 0.6456072 total: 42.6ms remaining: 4.74ms 9: learn: 0.6429542 total: 47.3ms remaining: 0us 0: learn: 0.6843003 total: 4.76ms remaining: 42.9ms 1: learn: 0.6772806 total: 9.78ms remaining: 39.1ms 2: learn: 0.6712385 total: 14.5ms remaining: 33.8ms 3: learn: 0.6665583 total: 19.3ms remaining: 28.9ms 4: learn: 0.6612645 total: 24ms remaining: 24ms 5: learn: 0.6574435 total: 29ms remaining: 19.3ms 6: learn: 0.6541627 total: 33.6ms remaining: 14.4ms 7: learn: 0.6506563 total: 38.4ms remaining: 9.61ms 8: learn: 0.6474845 total: 43.1ms remaining: 4.79ms 9: learn: 0.6449232 total: 47.6ms remaining: 0us 0: learn: 0.6845521 total: 4.81ms remaining: 115ms 1: learn: 0.6775051 total: 10.1ms remaining: 116ms 2: learn: 0.6716113 total: 15.8ms remaining: 116ms 3: learn: 0.6662326 total: 20.8ms remaining: 109ms 4: learn: 0.6606733 total: 25.4ms remaining: 102ms 5: learn: 0.6565534 total: 30.3ms remaining: 96ms 6: learn: 0.6529964 total: 35.3ms remaining: 90.8ms 7: learn: 0.6500911 total: 40ms remaining: 85.1ms 8: learn: 0.6466694 total: 44.9ms remaining: 79.8ms 9: learn: 0.6441484 total: 50.7ms remaining: 76.1ms 10: learn: 0.6402131 total: 56ms remaining: 71.3ms 11: learn: 0.6379078 total: 61ms remaining: 66.1ms 12: learn: 0.6358432 total: 66.4ms remaining: 61.3ms 13: learn: 0.6343783 total: 71ms remaining: 55.8ms 14: learn: 0.6324987 total: 76.2ms remaining: 50.8ms 15: learn: 0.6302252 total: 81.4ms remaining: 45.8ms 16: learn: 0.6278113 total: 86.8ms remaining: 40.8ms 17: learn: 0.6256164 total: 91.8ms remaining: 35.7ms 18: learn: 0.6239967 total: 96.6ms remaining: 30.5ms 19: learn: 0.6220147 total: 101ms remaining: 25.3ms 20: learn: 0.6203100 total: 106ms remaining: 20.2ms 21: learn: 0.6187106 total: 111ms remaining: 15.1ms 22: learn: 0.6175532 total: 115ms remaining: 10ms 23: learn: 0.6157316 total: 120ms remaining: 4.99ms 24: learn: 0.6129482 total: 124ms remaining: 0us 0: learn: 0.6837951 total: 4.94ms remaining: 119ms 1: learn: 0.6767637 total: 10.2ms remaining: 117ms 2: learn: 0.6709601 total: 15ms remaining: 110ms 3: learn: 0.6664329 total: 19.8ms remaining: 104ms 4: learn: 0.6607243 total: 24.2ms remaining: 96.9ms 5: learn: 0.6557170 total: 29ms remaining: 91.7ms 6: learn: 0.6520528 total: 33.4ms remaining: 86ms 7: learn: 0.6490042 total: 38.1ms remaining: 81ms 8: learn: 0.6455551 total: 43ms remaining: 76.5ms 9: learn: 0.6427830 total: 47.4ms remaining: 71.1ms 10: learn: 0.6399077 total: 52ms remaining: 66.2ms 11: learn: 0.6373909 total: 56.9ms remaining: 61.6ms 12: learn: 0.6351058 total: 61.6ms remaining: 56.9ms 13: learn: 0.6323107 total: 66.5ms remaining: 52.3ms 14: learn: 0.6302779 total: 71.2ms remaining: 47.5ms 15: learn: 0.6286723 total: 75.9ms remaining: 42.7ms 16: learn: 0.6269330 total: 80.9ms remaining: 38.1ms 17: learn: 0.6248633 total: 85.6ms remaining: 33.3ms 18: learn: 0.6224497 total: 90.2ms remaining: 28.5ms 19: learn: 0.6211614 total: 94.6ms remaining: 23.6ms 20: learn: 0.6204438 total: 98.6ms remaining: 18.8ms 21: learn: 0.6190248 total: 103ms remaining: 14ms 22: learn: 0.6169806 total: 107ms remaining: 9.33ms 23: learn: 0.6152174 total: 112ms remaining: 4.65ms 24: learn: 0.6136399 total: 116ms remaining: 0us 0: learn: 0.6840213 total: 5.16ms remaining: 124ms 1: learn: 0.6769563 total: 10.5ms remaining: 121ms 2: learn: 0.6704229 total: 15.4ms remaining: 113ms 3: learn: 0.6659820 total: 20.4ms remaining: 107ms 4: learn: 0.6608086 total: 24.9ms remaining: 99.6ms 5: learn: 0.6568028 total: 29.9ms remaining: 94.5ms 6: learn: 0.6521713 total: 34.5ms remaining: 88.6ms 7: learn: 0.6487712 total: 39.2ms remaining: 83.4ms 8: learn: 0.6462796 total: 44.2ms remaining: 78.6ms 9: learn: 0.6437662 total: 48.9ms remaining: 73.4ms 10: learn: 0.6398717 total: 53.8ms remaining: 68.5ms 11: learn: 0.6376752 total: 58.5ms remaining: 63.4ms 12: learn: 0.6358466 total: 63.1ms remaining: 58.3ms 13: learn: 0.6333864 total: 67.8ms remaining: 53.3ms 14: learn: 0.6316757 total: 72.6ms remaining: 48.4ms 15: learn: 0.6301148 total: 77.6ms remaining: 43.6ms 16: learn: 0.6279136 total: 82.4ms remaining: 38.8ms 17: learn: 0.6257547 total: 87.2ms remaining: 33.9ms 18: learn: 0.6241292 total: 91.6ms remaining: 28.9ms 19: learn: 0.6218127 total: 95.9ms remaining: 24ms 20: learn: 0.6195606 total: 100ms remaining: 19ms 21: learn: 0.6178590 total: 104ms remaining: 14.2ms 22: learn: 0.6163946 total: 108ms remaining: 9.42ms 23: learn: 0.6140180 total: 112ms remaining: 4.68ms 24: learn: 0.6122791 total: 117ms remaining: 0us 0: learn: 0.6841860 total: 4.82ms remaining: 116ms 1: learn: 0.6761862 total: 9.69ms remaining: 111ms 2: learn: 0.6702003 total: 14.5ms remaining: 107ms 3: learn: 0.6648719 total: 19.3ms remaining: 101ms 4: learn: 0.6595880 total: 23.9ms remaining: 95.7ms 5: learn: 0.6555562 total: 28.5ms remaining: 90.3ms 6: learn: 0.6520243 total: 33.1ms remaining: 85ms 7: learn: 0.6488524 total: 37.9ms remaining: 80.5ms 8: learn: 0.6456400 total: 42.6ms remaining: 75.7ms 9: learn: 0.6427984 total: 47.3ms remaining: 70.9ms 10: learn: 0.6402342 total: 52.2ms remaining: 66.4ms 11: learn: 0.6380172 total: 57ms remaining: 61.8ms 12: learn: 0.6360206 total: 61.7ms remaining: 56.9ms 13: learn: 0.6339117 total: 66.5ms remaining: 52.2ms 14: learn: 0.6317430 total: 71.5ms remaining: 47.6ms 15: learn: 0.6300417 total: 75.9ms remaining: 42.7ms 16: learn: 0.6281009 total: 80.5ms remaining: 37.9ms 17: learn: 0.6261908 total: 85.2ms remaining: 33.1ms 18: learn: 0.6235717 total: 90ms remaining: 28.4ms 19: learn: 0.6214241 total: 94.7ms remaining: 23.7ms 20: learn: 0.6198443 total: 99.6ms remaining: 19ms 21: learn: 0.6183196 total: 104ms remaining: 14.2ms 22: learn: 0.6168139 total: 109ms remaining: 9.44ms 23: learn: 0.6150096 total: 113ms remaining: 4.7ms 24: learn: 0.6132929 total: 117ms remaining: 0us 0: learn: 0.6834265 total: 4.87ms remaining: 117ms 1: learn: 0.6759055 total: 10ms remaining: 115ms 2: learn: 0.6698694 total: 14.8ms remaining: 109ms 3: learn: 0.6642132 total: 19.6ms remaining: 103ms 4: learn: 0.6589228 total: 25ms remaining: 100ms 5: learn: 0.6546309 total: 29.7ms remaining: 94.2ms 6: learn: 0.6511029 total: 34.7ms remaining: 89.1ms 7: learn: 0.6475361 total: 39.4ms remaining: 83.6ms 8: learn: 0.6442616 total: 44.1ms remaining: 78.5ms 9: learn: 0.6416422 total: 48.8ms remaining: 73.2ms 10: learn: 0.6390249 total: 53.6ms remaining: 68.3ms 11: learn: 0.6353742 total: 58.4ms remaining: 63.3ms 12: learn: 0.6339076 total: 63ms remaining: 58.2ms 13: learn: 0.6312348 total: 67.6ms remaining: 53.1ms 14: learn: 0.6294345 total: 72.3ms remaining: 48.2ms 15: learn: 0.6280725 total: 76.9ms remaining: 43.2ms 16: learn: 0.6261755 total: 81.6ms remaining: 38.4ms 17: learn: 0.6242236 total: 86.3ms remaining: 33.6ms 18: learn: 0.6226401 total: 90.7ms remaining: 28.6ms 19: learn: 0.6216806 total: 95ms remaining: 23.8ms 20: learn: 0.6195315 total: 99.7ms remaining: 19ms 21: learn: 0.6179573 total: 104ms remaining: 14.2ms 22: learn: 0.6166702 total: 109ms remaining: 9.46ms 23: learn: 0.6150086 total: 113ms remaining: 4.72ms 24: learn: 0.6131657 total: 117ms remaining: 0us 0: learn: 0.6842268 total: 5.08ms remaining: 122ms 1: learn: 0.6760565 total: 10.3ms remaining: 119ms 2: learn: 0.6701379 total: 15.2ms remaining: 112ms 3: learn: 0.6652193 total: 19.7ms remaining: 103ms 4: learn: 0.6598572 total: 24.4ms remaining: 97.5ms 5: learn: 0.6557990 total: 29.2ms remaining: 92.3ms 6: learn: 0.6520352 total: 33.9ms remaining: 87.1ms 7: learn: 0.6487328 total: 38.4ms remaining: 81.7ms 8: learn: 0.6451650 total: 43.3ms remaining: 77ms 9: learn: 0.6424713 total: 47.7ms remaining: 71.6ms 10: learn: 0.6398143 total: 52.4ms remaining: 66.7ms 11: learn: 0.6374917 total: 57.2ms remaining: 62ms 12: learn: 0.6355606 total: 61.7ms remaining: 57ms 13: learn: 0.6329508 total: 66.9ms remaining: 52.5ms 14: learn: 0.6308641 total: 71.8ms remaining: 47.8ms 15: learn: 0.6292939 total: 76.5ms remaining: 43ms 16: learn: 0.6271362 total: 81.3ms remaining: 38.3ms 17: learn: 0.6251023 total: 85.9ms remaining: 33.4ms 18: learn: 0.6233438 total: 90.4ms remaining: 28.6ms 19: learn: 0.6221818 total: 95ms remaining: 23.7ms 20: learn: 0.6202375 total: 99.4ms remaining: 18.9ms 21: learn: 0.6187741 total: 104ms remaining: 14.1ms 22: learn: 0.6168694 total: 108ms remaining: 9.38ms 23: learn: 0.6146085 total: 112ms remaining: 4.66ms 24: learn: 0.6133933 total: 116ms remaining: 0us 0: learn: 0.6834081 total: 4.67ms remaining: 112ms 1: learn: 0.6761479 total: 9.76ms remaining: 112ms 2: learn: 0.6702549 total: 14.8ms remaining: 108ms 3: learn: 0.6646763 total: 19.8ms remaining: 104ms 4: learn: 0.6595166 total: 24.3ms remaining: 97.2ms 5: learn: 0.6554302 total: 28.9ms remaining: 91.6ms 6: learn: 0.6516216 total: 33.7ms remaining: 86.7ms 7: learn: 0.6483806 total: 38.4ms remaining: 81.6ms 8: learn: 0.6450570 total: 43.2ms remaining: 76.8ms 9: learn: 0.6425214 total: 47.7ms remaining: 71.6ms 10: learn: 0.6398332 total: 52.5ms remaining: 66.8ms 11: learn: 0.6376388 total: 57.3ms remaining: 62.1ms 12: learn: 0.6356101 total: 62ms remaining: 57.2ms 13: learn: 0.6330268 total: 66.6ms remaining: 52.3ms 14: learn: 0.6309585 total: 71.4ms remaining: 47.6ms 15: learn: 0.6293844 total: 76.1ms remaining: 42.8ms 16: learn: 0.6272470 total: 81.1ms remaining: 38.2ms 17: learn: 0.6252113 total: 85.9ms remaining: 33.4ms 18: learn: 0.6235501 total: 90.6ms remaining: 28.6ms 19: learn: 0.6224487 total: 95.1ms remaining: 23.8ms 20: learn: 0.6208627 total: 99.6ms remaining: 19ms 21: learn: 0.6193045 total: 104ms remaining: 14.2ms 22: learn: 0.6179148 total: 108ms remaining: 9.41ms 23: learn: 0.6161479 total: 112ms remaining: 4.68ms 24: learn: 0.6143506 total: 117ms remaining: 0us 0: learn: 0.6835852 total: 4.9ms remaining: 118ms 1: learn: 0.6766025 total: 9.64ms remaining: 111ms 2: learn: 0.6701163 total: 14.5ms remaining: 107ms 3: learn: 0.6646611 total: 19.6ms remaining: 103ms 4: learn: 0.6594722 total: 24.6ms remaining: 98.5ms 5: learn: 0.6553730 total: 29.5ms remaining: 93.4ms 6: learn: 0.6514609 total: 34.3ms remaining: 88.1ms 7: learn: 0.6477935 total: 38.9ms remaining: 82.7ms 8: learn: 0.6441283 total: 43.8ms remaining: 77.9ms 9: learn: 0.6413718 total: 48.6ms remaining: 72.9ms 10: learn: 0.6386036 total: 53.3ms remaining: 67.9ms 11: learn: 0.6363458 total: 58.1ms remaining: 62.9ms 12: learn: 0.6347685 total: 62.8ms remaining: 58ms 13: learn: 0.6326723 total: 67.3ms remaining: 52.8ms 14: learn: 0.6307543 total: 71.9ms remaining: 48ms 15: learn: 0.6292493 total: 76.7ms remaining: 43.2ms 16: learn: 0.6275046 total: 81.5ms remaining: 38.3ms 17: learn: 0.6246445 total: 86.4ms remaining: 33.6ms 18: learn: 0.6217869 total: 91.1ms remaining: 28.8ms 19: learn: 0.6193683 total: 95.8ms remaining: 24ms 20: learn: 0.6179878 total: 100ms remaining: 19.1ms 21: learn: 0.6162355 total: 105ms remaining: 14.3ms 22: learn: 0.6145635 total: 109ms remaining: 9.51ms 23: learn: 0.6127275 total: 114ms remaining: 4.74ms 24: learn: 0.6114964 total: 118ms remaining: 0us 0: learn: 0.6837982 total: 4.87ms remaining: 117ms 1: learn: 0.6765593 total: 9.77ms remaining: 112ms 2: learn: 0.6705135 total: 14.6ms remaining: 107ms 3: learn: 0.6652650 total: 19.6ms remaining: 103ms 4: learn: 0.6599831 total: 24.1ms remaining: 96.6ms 5: learn: 0.6559021 total: 28.7ms remaining: 91ms 6: learn: 0.6525644 total: 33.4ms remaining: 85.8ms 7: learn: 0.6488499 total: 38.3ms remaining: 81.5ms 8: learn: 0.6456072 total: 43.2ms remaining: 76.8ms 9: learn: 0.6429542 total: 47.9ms remaining: 71.8ms 10: learn: 0.6405839 total: 52.7ms remaining: 67.1ms 11: learn: 0.6382378 total: 57.8ms remaining: 62.6ms 12: learn: 0.6363475 total: 62.4ms remaining: 57.6ms 13: learn: 0.6336127 total: 66.9ms remaining: 52.6ms 14: learn: 0.6316472 total: 72ms remaining: 48ms 15: learn: 0.6301725 total: 76.7ms remaining: 43.1ms 16: learn: 0.6276099 total: 81.6ms remaining: 38.4ms 17: learn: 0.6253627 total: 86.3ms remaining: 33.6ms 18: learn: 0.6225300 total: 90.9ms remaining: 28.7ms 19: learn: 0.6212397 total: 95.6ms remaining: 23.9ms 20: learn: 0.6194863 total: 101ms remaining: 19.2ms 21: learn: 0.6180220 total: 105ms remaining: 14.3ms 22: learn: 0.6161519 total: 110ms remaining: 9.52ms 23: learn: 0.6141716 total: 114ms remaining: 4.74ms 24: learn: 0.6125460 total: 118ms remaining: 0us 0: learn: 0.6843003 total: 4.83ms remaining: 116ms 1: learn: 0.6772806 total: 9.75ms remaining: 112ms 2: learn: 0.6712385 total: 14.5ms remaining: 106ms 3: learn: 0.6665583 total: 19.1ms remaining: 100ms 4: learn: 0.6612645 total: 23.8ms remaining: 95.1ms 5: learn: 0.6574435 total: 28.7ms remaining: 91ms 6: learn: 0.6541627 total: 33.4ms remaining: 85.8ms 7: learn: 0.6506563 total: 38.1ms remaining: 80.9ms 8: learn: 0.6474845 total: 42.8ms remaining: 76.2ms 9: learn: 0.6449232 total: 47.5ms remaining: 71.3ms 10: learn: 0.6411716 total: 52.3ms remaining: 66.5ms 11: learn: 0.6386104 total: 56.7ms remaining: 61.4ms 12: learn: 0.6371543 total: 61.7ms remaining: 56.9ms 13: learn: 0.6344039 total: 66.9ms remaining: 52.5ms 14: learn: 0.6323422 total: 71.6ms remaining: 47.8ms 15: learn: 0.6307121 total: 76.2ms remaining: 42.9ms 16: learn: 0.6286112 total: 81.1ms remaining: 38.2ms 17: learn: 0.6263991 total: 85.8ms remaining: 33.4ms 18: learn: 0.6236455 total: 90.5ms remaining: 28.6ms 19: learn: 0.6222689 total: 95ms remaining: 23.7ms 20: learn: 0.6201993 total: 99.5ms remaining: 18.9ms 21: learn: 0.6184319 total: 104ms remaining: 14.2ms 22: learn: 0.6169642 total: 109ms remaining: 9.46ms 23: learn: 0.6151761 total: 113ms remaining: 4.71ms 24: learn: 0.6133052 total: 118ms remaining: 0us 0: learn: 0.6845521 total: 4.94ms remaining: 242ms 1: learn: 0.6775051 total: 10.3ms remaining: 246ms 2: learn: 0.6716113 total: 15.3ms remaining: 239ms 3: learn: 0.6662326 total: 20.2ms remaining: 232ms 4: learn: 0.6606733 total: 24.9ms remaining: 224ms 5: learn: 0.6565534 total: 29.9ms remaining: 219ms 6: learn: 0.6529964 total: 34.5ms remaining: 212ms 7: learn: 0.6500911 total: 39.2ms remaining: 206ms 8: learn: 0.6466694 total: 43.9ms remaining: 200ms 9: learn: 0.6441484 total: 48.3ms remaining: 193ms 10: learn: 0.6402131 total: 53.3ms remaining: 189ms 11: learn: 0.6379078 total: 58.2ms remaining: 184ms 12: learn: 0.6358432 total: 62.6ms remaining: 178ms 13: learn: 0.6343783 total: 67.1ms remaining: 173ms 14: learn: 0.6324987 total: 71.9ms remaining: 168ms 15: learn: 0.6302252 total: 76.6ms remaining: 163ms 16: learn: 0.6278113 total: 81.6ms remaining: 158ms 17: learn: 0.6256164 total: 86.3ms remaining: 153ms 18: learn: 0.6239967 total: 90.8ms remaining: 148ms 19: learn: 0.6220147 total: 95.1ms remaining: 143ms 20: learn: 0.6203100 total: 99.5ms remaining: 137ms 21: learn: 0.6187106 total: 104ms remaining: 132ms 22: learn: 0.6175532 total: 108ms remaining: 127ms 23: learn: 0.6157316 total: 112ms remaining: 121ms 24: learn: 0.6129482 total: 116ms remaining: 116ms 25: learn: 0.6120210 total: 121ms remaining: 111ms 26: learn: 0.6108230 total: 125ms remaining: 106ms 27: learn: 0.6093278 total: 129ms remaining: 101ms 28: learn: 0.6077799 total: 133ms remaining: 96.2ms 29: learn: 0.6065499 total: 137ms remaining: 91.3ms 30: learn: 0.6052389 total: 141ms remaining: 86.5ms 31: learn: 0.6034029 total: 146ms remaining: 81.9ms 32: learn: 0.6022029 total: 150ms remaining: 77.3ms 33: learn: 0.6013473 total: 154ms remaining: 72.5ms 34: learn: 0.6003732 total: 158ms remaining: 67.8ms 35: learn: 0.5990512 total: 162ms remaining: 63.1ms 36: learn: 0.5975049 total: 167ms remaining: 58.6ms 37: learn: 0.5959896 total: 171ms remaining: 53.9ms 38: learn: 0.5950600 total: 175ms remaining: 49.5ms 39: learn: 0.5935132 total: 180ms remaining: 45ms 40: learn: 0.5925498 total: 185ms remaining: 40.5ms 41: learn: 0.5911539 total: 189ms remaining: 36ms 42: learn: 0.5903574 total: 193ms remaining: 31.5ms 43: learn: 0.5893585 total: 197ms remaining: 26.9ms 44: learn: 0.5882358 total: 202ms remaining: 22.4ms 45: learn: 0.5873325 total: 206ms remaining: 17.9ms 46: learn: 0.5861092 total: 210ms remaining: 13.4ms 47: learn: 0.5848488 total: 215ms remaining: 8.94ms 48: learn: 0.5835245 total: 219ms remaining: 4.47ms 49: learn: 0.5825423 total: 223ms remaining: 0us 0: learn: 0.6837951 total: 4.72ms remaining: 231ms 1: learn: 0.6767637 total: 9.87ms remaining: 237ms 2: learn: 0.6709601 total: 14.6ms remaining: 229ms 3: learn: 0.6664329 total: 19.5ms remaining: 224ms 4: learn: 0.6607243 total: 24.1ms remaining: 217ms 5: learn: 0.6557170 total: 28.8ms remaining: 212ms 6: learn: 0.6520528 total: 33.6ms remaining: 206ms 7: learn: 0.6490042 total: 38.3ms remaining: 201ms 8: learn: 0.6455551 total: 43.3ms remaining: 197ms 9: learn: 0.6427830 total: 47.8ms remaining: 191ms 10: learn: 0.6399077 total: 52.5ms remaining: 186ms 11: learn: 0.6373909 total: 57.2ms remaining: 181ms 12: learn: 0.6351058 total: 62ms remaining: 176ms 13: learn: 0.6323107 total: 66.7ms remaining: 171ms 14: learn: 0.6302779 total: 71.4ms remaining: 167ms 15: learn: 0.6286723 total: 75.9ms remaining: 161ms 16: learn: 0.6269330 total: 80.6ms remaining: 156ms 17: learn: 0.6248633 total: 85.4ms remaining: 152ms 18: learn: 0.6224497 total: 90.2ms remaining: 147ms 19: learn: 0.6211614 total: 94.8ms remaining: 142ms 20: learn: 0.6204438 total: 99.1ms remaining: 137ms 21: learn: 0.6190248 total: 104ms remaining: 132ms 22: learn: 0.6169806 total: 108ms remaining: 127ms 23: learn: 0.6152174 total: 113ms remaining: 122ms 24: learn: 0.6136399 total: 117ms remaining: 117ms 25: learn: 0.6122470 total: 121ms remaining: 112ms 26: learn: 0.6098422 total: 126ms remaining: 107ms 27: learn: 0.6076600 total: 130ms remaining: 102ms 28: learn: 0.6065810 total: 134ms remaining: 97ms 29: learn: 0.6053562 total: 138ms remaining: 92ms 30: learn: 0.6038758 total: 142ms remaining: 87.1ms 31: learn: 0.6021953 total: 146ms remaining: 82.3ms 32: learn: 0.6011252 total: 151ms remaining: 77.6ms 33: learn: 0.6001437 total: 155ms remaining: 72.9ms 34: learn: 0.5989359 total: 159ms remaining: 68.2ms 35: learn: 0.5973824 total: 163ms remaining: 63.6ms 36: learn: 0.5963344 total: 168ms remaining: 58.9ms 37: learn: 0.5950411 total: 172ms remaining: 54.3ms 38: learn: 0.5943121 total: 176ms remaining: 49.6ms 39: learn: 0.5923122 total: 181ms remaining: 45.2ms 40: learn: 0.5913630 total: 186ms remaining: 40.7ms 41: learn: 0.5900274 total: 190ms remaining: 36.2ms 42: learn: 0.5892765 total: 195ms remaining: 31.7ms 43: learn: 0.5884283 total: 199ms remaining: 27.1ms 44: learn: 0.5871641 total: 203ms remaining: 22.6ms 45: learn: 0.5860506 total: 207ms remaining: 18ms 46: learn: 0.5850525 total: 212ms remaining: 13.5ms 47: learn: 0.5837311 total: 216ms remaining: 8.99ms 48: learn: 0.5829113 total: 220ms remaining: 4.49ms 49: learn: 0.5817887 total: 224ms remaining: 0us 0: learn: 0.6840213 total: 4.98ms remaining: 244ms 1: learn: 0.6769563 total: 10.2ms remaining: 244ms 2: learn: 0.6704229 total: 15.1ms remaining: 237ms 3: learn: 0.6659820 total: 19.9ms remaining: 229ms 4: learn: 0.6608086 total: 24.7ms remaining: 223ms 5: learn: 0.6568028 total: 29.8ms remaining: 218ms 6: learn: 0.6521713 total: 34.6ms remaining: 212ms 7: learn: 0.6487712 total: 39.5ms remaining: 207ms 8: learn: 0.6462796 total: 44.1ms remaining: 201ms 9: learn: 0.6437662 total: 48.9ms remaining: 196ms 10: learn: 0.6398717 total: 53.8ms remaining: 191ms 11: learn: 0.6376752 total: 58.8ms remaining: 186ms 12: learn: 0.6358466 total: 63.2ms remaining: 180ms 13: learn: 0.6333864 total: 67.9ms remaining: 174ms 14: learn: 0.6316757 total: 72.7ms remaining: 170ms 15: learn: 0.6301148 total: 77.4ms remaining: 164ms 16: learn: 0.6279136 total: 82.2ms remaining: 160ms 17: learn: 0.6257547 total: 86.9ms remaining: 154ms 18: learn: 0.6241292 total: 91.5ms remaining: 149ms 19: learn: 0.6218127 total: 96.3ms remaining: 144ms 20: learn: 0.6195606 total: 101ms remaining: 139ms 21: learn: 0.6178590 total: 105ms remaining: 133ms 22: learn: 0.6163946 total: 109ms remaining: 128ms 23: learn: 0.6140180 total: 114ms remaining: 123ms 24: learn: 0.6122791 total: 118ms remaining: 118ms 25: learn: 0.6112614 total: 122ms remaining: 113ms 26: learn: 0.6094782 total: 127ms remaining: 108ms 27: learn: 0.6078087 total: 131ms remaining: 103ms 28: learn: 0.6066659 total: 135ms remaining: 97.5ms 29: learn: 0.6053443 total: 139ms remaining: 92.5ms 30: learn: 0.6041714 total: 143ms remaining: 87.5ms 31: learn: 0.6021001 total: 147ms remaining: 82.8ms 32: learn: 0.6005396 total: 151ms remaining: 77.9ms 33: learn: 0.5993678 total: 155ms remaining: 73.1ms 34: learn: 0.5987600 total: 160ms remaining: 68.4ms 35: learn: 0.5974260 total: 164ms remaining: 63.7ms 36: learn: 0.5963513 total: 168ms remaining: 59.1ms 37: learn: 0.5950719 total: 172ms remaining: 54.4ms 38: learn: 0.5938908 total: 177ms remaining: 49.8ms 39: learn: 0.5925384 total: 181ms remaining: 45.2ms 40: learn: 0.5919412 total: 186ms remaining: 40.8ms 41: learn: 0.5912225 total: 190ms remaining: 36.2ms 42: learn: 0.5903078 total: 194ms remaining: 31.6ms 43: learn: 0.5889249 total: 199ms remaining: 27.2ms 44: learn: 0.5879766 total: 203ms remaining: 22.6ms 45: learn: 0.5866715 total: 208ms remaining: 18.1ms 46: learn: 0.5857485 total: 212ms remaining: 13.5ms 47: learn: 0.5842650 total: 216ms remaining: 9ms 48: learn: 0.5834557 total: 220ms remaining: 4.49ms 49: learn: 0.5828235 total: 224ms remaining: 0us 0: learn: 0.6841860 total: 5.14ms remaining: 252ms 1: learn: 0.6761862 total: 10.1ms remaining: 243ms 2: learn: 0.6702003 total: 15ms remaining: 235ms 3: learn: 0.6648719 total: 19.8ms remaining: 227ms 4: learn: 0.6595880 total: 24.5ms remaining: 221ms 5: learn: 0.6555562 total: 29.3ms remaining: 215ms 6: learn: 0.6520243 total: 33.9ms remaining: 208ms 7: learn: 0.6488524 total: 38.9ms remaining: 204ms 8: learn: 0.6456400 total: 43.6ms remaining: 199ms 9: learn: 0.6427984 total: 48.5ms remaining: 194ms 10: learn: 0.6402342 total: 53.4ms remaining: 189ms 11: learn: 0.6380172 total: 58.4ms remaining: 185ms 12: learn: 0.6360206 total: 62.9ms remaining: 179ms 13: learn: 0.6339117 total: 67.6ms remaining: 174ms 14: learn: 0.6317430 total: 72.6ms remaining: 169ms 15: learn: 0.6300417 total: 77.4ms remaining: 164ms 16: learn: 0.6281009 total: 82.3ms remaining: 160ms 17: learn: 0.6261908 total: 86.7ms remaining: 154ms 18: learn: 0.6235717 total: 91.7ms remaining: 150ms 19: learn: 0.6214241 total: 96ms remaining: 144ms 20: learn: 0.6198443 total: 100ms remaining: 138ms 21: learn: 0.6183196 total: 104ms remaining: 133ms 22: learn: 0.6168139 total: 109ms remaining: 127ms 23: learn: 0.6150096 total: 113ms remaining: 122ms 24: learn: 0.6132929 total: 117ms remaining: 117ms 25: learn: 0.6121201 total: 121ms remaining: 112ms 26: learn: 0.6112611 total: 125ms remaining: 107ms 27: learn: 0.6096525 total: 129ms remaining: 102ms 28: learn: 0.6074434 total: 134ms remaining: 97ms 29: learn: 0.6057332 total: 138ms remaining: 92.3ms 30: learn: 0.6043233 total: 143ms remaining: 87.4ms 31: learn: 0.6029073 total: 147ms remaining: 82.6ms 32: learn: 0.6019256 total: 151ms remaining: 77.7ms 33: learn: 0.6006495 total: 155ms remaining: 73ms 34: learn: 0.5998154 total: 160ms remaining: 68.4ms 35: learn: 0.5986569 total: 164ms remaining: 63.9ms 36: learn: 0.5970312 total: 169ms remaining: 59.2ms 37: learn: 0.5954187 total: 173ms remaining: 54.6ms 38: learn: 0.5946749 total: 177ms remaining: 49.9ms 39: learn: 0.5929745 total: 181ms remaining: 45.3ms 40: learn: 0.5921588 total: 186ms remaining: 40.8ms 41: learn: 0.5904367 total: 190ms remaining: 36.2ms 42: learn: 0.5894918 total: 195ms remaining: 31.7ms 43: learn: 0.5885777 total: 199ms remaining: 27.1ms 44: learn: 0.5877255 total: 204ms remaining: 22.7ms 45: learn: 0.5863412 total: 208ms remaining: 18.1ms 46: learn: 0.5854721 total: 212ms remaining: 13.6ms 47: learn: 0.5846844 total: 216ms remaining: 9.02ms 48: learn: 0.5836536 total: 221ms remaining: 4.51ms 49: learn: 0.5826703 total: 225ms remaining: 0us 0: learn: 0.6834265 total: 4.83ms remaining: 237ms 1: learn: 0.6759055 total: 10ms remaining: 241ms 2: learn: 0.6698694 total: 14.8ms remaining: 232ms 3: learn: 0.6642132 total: 19.4ms remaining: 224ms 4: learn: 0.6589228 total: 24.3ms remaining: 219ms 5: learn: 0.6546309 total: 29.4ms remaining: 215ms 6: learn: 0.6511029 total: 34.3ms remaining: 211ms 7: learn: 0.6475361 total: 39.1ms remaining: 205ms 8: learn: 0.6442616 total: 43.8ms remaining: 200ms 9: learn: 0.6416422 total: 48.3ms remaining: 193ms 10: learn: 0.6390249 total: 53.1ms remaining: 188ms 11: learn: 0.6353742 total: 58.1ms remaining: 184ms 12: learn: 0.6339076 total: 62.6ms remaining: 178ms 13: learn: 0.6312348 total: 67.4ms remaining: 173ms 14: learn: 0.6294345 total: 72.2ms remaining: 168ms 15: learn: 0.6280725 total: 76.9ms remaining: 163ms 16: learn: 0.6261755 total: 81.5ms remaining: 158ms 17: learn: 0.6242236 total: 86.3ms remaining: 153ms 18: learn: 0.6226401 total: 90.8ms remaining: 148ms 19: learn: 0.6216806 total: 95.4ms remaining: 143ms 20: learn: 0.6195315 total: 99.9ms remaining: 138ms 21: learn: 0.6179573 total: 104ms remaining: 133ms 22: learn: 0.6166702 total: 109ms remaining: 127ms 23: learn: 0.6150086 total: 113ms remaining: 122ms 24: learn: 0.6131657 total: 117ms remaining: 117ms 25: learn: 0.6116258 total: 121ms remaining: 112ms 26: learn: 0.6093126 total: 125ms remaining: 107ms 27: learn: 0.6074338 total: 130ms remaining: 102ms 28: learn: 0.6058295 total: 134ms remaining: 97ms 29: learn: 0.6048119 total: 138ms remaining: 92ms 30: learn: 0.6033921 total: 142ms remaining: 87.1ms 31: learn: 0.6021788 total: 146ms remaining: 82.2ms 32: learn: 0.6009651 total: 150ms remaining: 77.5ms 33: learn: 0.5997813 total: 155ms remaining: 72.9ms 34: learn: 0.5989234 total: 159ms remaining: 68.2ms 35: learn: 0.5975226 total: 164ms remaining: 63.6ms 36: learn: 0.5964116 total: 168ms remaining: 58.9ms 37: learn: 0.5953556 total: 172ms remaining: 54.3ms 38: learn: 0.5937981 total: 176ms remaining: 49.6ms 39: learn: 0.5927213 total: 180ms remaining: 45ms 40: learn: 0.5915773 total: 184ms remaining: 40.5ms 41: learn: 0.5907300 total: 189ms remaining: 36ms 42: learn: 0.5897510 total: 194ms remaining: 31.5ms 43: learn: 0.5885648 total: 198ms remaining: 27ms 44: learn: 0.5872752 total: 203ms remaining: 22.5ms 45: learn: 0.5861127 total: 207ms remaining: 18ms 46: learn: 0.5843099 total: 211ms remaining: 13.5ms 47: learn: 0.5831998 total: 215ms remaining: 8.98ms 48: learn: 0.5820317 total: 220ms remaining: 4.49ms 49: learn: 0.5812362 total: 224ms remaining: 0us 0: learn: 0.6842268 total: 5.11ms remaining: 250ms 1: learn: 0.6760565 total: 10.3ms remaining: 246ms 2: learn: 0.6701379 total: 15.1ms remaining: 237ms 3: learn: 0.6652193 total: 19.8ms remaining: 227ms 4: learn: 0.6598572 total: 24.5ms remaining: 221ms 5: learn: 0.6557990 total: 29.4ms remaining: 215ms 6: learn: 0.6520352 total: 34.5ms remaining: 212ms 7: learn: 0.6487328 total: 39.7ms remaining: 208ms 8: learn: 0.6451650 total: 44.4ms remaining: 202ms 9: learn: 0.6424713 total: 49ms remaining: 196ms 10: learn: 0.6398143 total: 53.9ms remaining: 191ms 11: learn: 0.6374917 total: 58.8ms remaining: 186ms 12: learn: 0.6355606 total: 63.3ms remaining: 180ms 13: learn: 0.6329508 total: 68.2ms remaining: 175ms 14: learn: 0.6308641 total: 73ms remaining: 170ms 15: learn: 0.6292939 total: 77.7ms remaining: 165ms 16: learn: 0.6271362 total: 82.4ms remaining: 160ms 17: learn: 0.6251023 total: 87.1ms remaining: 155ms 18: learn: 0.6233438 total: 91.6ms remaining: 150ms 19: learn: 0.6221818 total: 96ms remaining: 144ms 20: learn: 0.6202375 total: 101ms remaining: 139ms 21: learn: 0.6187741 total: 105ms remaining: 134ms 22: learn: 0.6168694 total: 110ms remaining: 129ms 23: learn: 0.6146085 total: 114ms remaining: 123ms 24: learn: 0.6133933 total: 118ms remaining: 118ms 25: learn: 0.6116213 total: 122ms remaining: 113ms 26: learn: 0.6096084 total: 126ms remaining: 108ms 27: learn: 0.6081402 total: 131ms remaining: 103ms 28: learn: 0.6070976 total: 135ms remaining: 97.7ms 29: learn: 0.6062765 total: 139ms remaining: 92.7ms 30: learn: 0.6048817 total: 143ms remaining: 87.7ms 31: learn: 0.6028570 total: 147ms remaining: 82.9ms 32: learn: 0.6012104 total: 151ms remaining: 78ms 33: learn: 0.5998941 total: 156ms remaining: 73.2ms 34: learn: 0.5988594 total: 160ms remaining: 68.5ms 35: learn: 0.5977308 total: 164ms remaining: 63.8ms 36: learn: 0.5958376 total: 168ms remaining: 59.1ms 37: learn: 0.5948838 total: 173ms remaining: 54.5ms 38: learn: 0.5932107 total: 177ms remaining: 49.9ms 39: learn: 0.5922998 total: 181ms remaining: 45.2ms 40: learn: 0.5899832 total: 185ms remaining: 40.6ms 41: learn: 0.5892909 total: 190ms remaining: 36.1ms 42: learn: 0.5879966 total: 194ms remaining: 31.7ms 43: learn: 0.5870721 total: 199ms remaining: 27.1ms 44: learn: 0.5858891 total: 204ms remaining: 22.6ms 45: learn: 0.5845394 total: 208ms remaining: 18.1ms 46: learn: 0.5836553 total: 212ms remaining: 13.5ms 47: learn: 0.5827720 total: 216ms remaining: 9ms 48: learn: 0.5818858 total: 220ms remaining: 4.49ms 49: learn: 0.5808819 total: 224ms remaining: 0us 0: learn: 0.6834081 total: 4.99ms remaining: 245ms 1: learn: 0.6761479 total: 9.97ms remaining: 239ms 2: learn: 0.6702549 total: 14.4ms remaining: 226ms 3: learn: 0.6646763 total: 19.4ms remaining: 223ms 4: learn: 0.6595166 total: 23.8ms remaining: 214ms 5: learn: 0.6554302 total: 29.1ms remaining: 214ms 6: learn: 0.6516216 total: 34.1ms remaining: 209ms 7: learn: 0.6483806 total: 39.1ms remaining: 205ms 8: learn: 0.6450570 total: 43.9ms remaining: 200ms 9: learn: 0.6425214 total: 48.4ms remaining: 194ms 10: learn: 0.6398332 total: 53.2ms remaining: 189ms 11: learn: 0.6376388 total: 57.9ms remaining: 183ms 12: learn: 0.6356101 total: 62.4ms remaining: 178ms 13: learn: 0.6330268 total: 67.1ms remaining: 173ms 14: learn: 0.6309585 total: 72ms remaining: 168ms 15: learn: 0.6293844 total: 76.8ms remaining: 163ms 16: learn: 0.6272470 total: 81.5ms remaining: 158ms 17: learn: 0.6252113 total: 86.4ms remaining: 154ms 18: learn: 0.6235501 total: 90.8ms remaining: 148ms 19: learn: 0.6224487 total: 95.1ms remaining: 143ms 20: learn: 0.6208627 total: 99.7ms remaining: 138ms 21: learn: 0.6193045 total: 104ms remaining: 133ms 22: learn: 0.6179148 total: 109ms remaining: 128ms 23: learn: 0.6161479 total: 113ms remaining: 123ms 24: learn: 0.6143506 total: 118ms remaining: 118ms 25: learn: 0.6126537 total: 122ms remaining: 113ms 26: learn: 0.6101299 total: 126ms remaining: 108ms 27: learn: 0.6085759 total: 130ms remaining: 102ms 28: learn: 0.6075076 total: 135ms remaining: 97.4ms 29: learn: 0.6065810 total: 139ms remaining: 92.3ms 30: learn: 0.6054474 total: 143ms remaining: 87.4ms 31: learn: 0.6030406 total: 147ms remaining: 82.6ms 32: learn: 0.6016368 total: 151ms remaining: 77.9ms 33: learn: 0.6001212 total: 155ms remaining: 73.1ms 34: learn: 0.5993824 total: 160ms remaining: 68.4ms 35: learn: 0.5975299 total: 164ms remaining: 63.9ms 36: learn: 0.5961852 total: 169ms remaining: 59.3ms 37: learn: 0.5952289 total: 173ms remaining: 54.6ms 38: learn: 0.5939332 total: 177ms remaining: 50ms 39: learn: 0.5924742 total: 181ms remaining: 45.4ms 40: learn: 0.5914719 total: 186ms remaining: 40.7ms 41: learn: 0.5907320 total: 190ms remaining: 36.2ms 42: learn: 0.5896136 total: 195ms remaining: 31.7ms 43: learn: 0.5885114 total: 199ms remaining: 27.2ms 44: learn: 0.5869633 total: 204ms remaining: 22.7ms 45: learn: 0.5856667 total: 208ms remaining: 18.1ms 46: learn: 0.5846986 total: 212ms remaining: 13.6ms 47: learn: 0.5835777 total: 217ms remaining: 9.02ms 48: learn: 0.5830552 total: 221ms remaining: 4.5ms 49: learn: 0.5818761 total: 225ms remaining: 0us 0: learn: 0.6835852 total: 5.12ms remaining: 251ms 1: learn: 0.6766025 total: 10.3ms remaining: 247ms 2: learn: 0.6701163 total: 15.1ms remaining: 236ms 3: learn: 0.6646611 total: 20ms remaining: 230ms 4: learn: 0.6594722 total: 24.8ms remaining: 223ms 5: learn: 0.6553730 total: 29.4ms remaining: 216ms 6: learn: 0.6514609 total: 34ms remaining: 209ms 7: learn: 0.6477935 total: 38.9ms remaining: 204ms 8: learn: 0.6441283 total: 43.5ms remaining: 198ms 9: learn: 0.6413718 total: 48.2ms remaining: 193ms 10: learn: 0.6386036 total: 52.9ms remaining: 188ms 11: learn: 0.6363458 total: 57.9ms remaining: 183ms 12: learn: 0.6347685 total: 62.6ms remaining: 178ms 13: learn: 0.6326723 total: 67.3ms remaining: 173ms 14: learn: 0.6307543 total: 72.3ms remaining: 169ms 15: learn: 0.6292493 total: 77.2ms remaining: 164ms 16: learn: 0.6275046 total: 82.1ms remaining: 159ms 17: learn: 0.6246445 total: 87.1ms remaining: 155ms 18: learn: 0.6217869 total: 91.9ms remaining: 150ms 19: learn: 0.6193683 total: 96.8ms remaining: 145ms 20: learn: 0.6179878 total: 102ms remaining: 141ms 21: learn: 0.6162355 total: 106ms remaining: 135ms 22: learn: 0.6145635 total: 111ms remaining: 130ms 23: learn: 0.6127275 total: 115ms remaining: 125ms 24: learn: 0.6114964 total: 120ms remaining: 120ms 25: learn: 0.6097516 total: 124ms remaining: 114ms 26: learn: 0.6081714 total: 128ms remaining: 109ms 27: learn: 0.6058599 total: 132ms remaining: 104ms 28: learn: 0.6039688 total: 137ms remaining: 99ms 29: learn: 0.6031747 total: 141ms remaining: 93.9ms 30: learn: 0.6016681 total: 145ms remaining: 88.8ms 31: learn: 0.6002829 total: 149ms remaining: 83.8ms 32: learn: 0.5985697 total: 153ms remaining: 79ms 33: learn: 0.5969388 total: 158ms remaining: 74.3ms 34: learn: 0.5961342 total: 162ms remaining: 69.6ms 35: learn: 0.5949221 total: 167ms remaining: 64.8ms 36: learn: 0.5935271 total: 171ms remaining: 60.1ms 37: learn: 0.5920261 total: 176ms remaining: 55.5ms 38: learn: 0.5906630 total: 180ms remaining: 50.9ms 39: learn: 0.5895734 total: 185ms remaining: 46.3ms 40: learn: 0.5884717 total: 189ms remaining: 41.6ms 41: learn: 0.5879146 total: 194ms remaining: 36.9ms 42: learn: 0.5870413 total: 198ms remaining: 32.3ms 43: learn: 0.5858552 total: 203ms remaining: 27.6ms 44: learn: 0.5844067 total: 207ms remaining: 23ms 45: learn: 0.5829307 total: 211ms remaining: 18.4ms 46: learn: 0.5820499 total: 216ms remaining: 13.8ms 47: learn: 0.5811154 total: 220ms remaining: 9.15ms 48: learn: 0.5798581 total: 224ms remaining: 4.57ms 49: learn: 0.5789032 total: 228ms remaining: 0us 0: learn: 0.6837982 total: 4.95ms remaining: 242ms 1: learn: 0.6765593 total: 10.1ms remaining: 241ms 2: learn: 0.6705135 total: 15ms remaining: 234ms 3: learn: 0.6652650 total: 19.9ms remaining: 229ms 4: learn: 0.6599831 total: 24.2ms remaining: 218ms 5: learn: 0.6559021 total: 29ms remaining: 213ms 6: learn: 0.6525644 total: 33.9ms remaining: 209ms 7: learn: 0.6488499 total: 38.7ms remaining: 203ms 8: learn: 0.6456072 total: 43.6ms remaining: 199ms 9: learn: 0.6429542 total: 48.5ms remaining: 194ms 10: learn: 0.6405839 total: 53.3ms remaining: 189ms 11: learn: 0.6382378 total: 58.3ms remaining: 184ms 12: learn: 0.6363475 total: 62.9ms remaining: 179ms 13: learn: 0.6336127 total: 67.5ms remaining: 173ms 14: learn: 0.6316472 total: 72.3ms remaining: 169ms 15: learn: 0.6301725 total: 77ms remaining: 164ms 16: learn: 0.6276099 total: 81.9ms remaining: 159ms 17: learn: 0.6253627 total: 86.7ms remaining: 154ms 18: learn: 0.6225300 total: 91.4ms remaining: 149ms 19: learn: 0.6212397 total: 95.7ms remaining: 144ms 20: learn: 0.6194863 total: 100ms remaining: 139ms 21: learn: 0.6180220 total: 105ms remaining: 134ms 22: learn: 0.6161519 total: 109ms remaining: 128ms 23: learn: 0.6141716 total: 114ms remaining: 123ms 24: learn: 0.6125460 total: 118ms remaining: 118ms 25: learn: 0.6101566 total: 123ms remaining: 113ms 26: learn: 0.6087517 total: 127ms remaining: 108ms 27: learn: 0.6075427 total: 131ms remaining: 103ms 28: learn: 0.6061585 total: 135ms remaining: 97.7ms 29: learn: 0.6052805 total: 139ms remaining: 92.7ms 30: learn: 0.6039742 total: 143ms remaining: 87.7ms 31: learn: 0.6022500 total: 147ms remaining: 82.9ms 32: learn: 0.6015583 total: 151ms remaining: 78ms 33: learn: 0.5999749 total: 156ms remaining: 73.3ms 34: learn: 0.5991698 total: 160ms remaining: 68.5ms 35: learn: 0.5971400 total: 164ms remaining: 63.8ms 36: learn: 0.5959429 total: 168ms remaining: 59.1ms 37: learn: 0.5950512 total: 173ms remaining: 54.5ms 38: learn: 0.5934701 total: 178ms remaining: 50.2ms 39: learn: 0.5921884 total: 183ms remaining: 45.7ms 40: learn: 0.5914204 total: 188ms remaining: 41.2ms 41: learn: 0.5902608 total: 192ms remaining: 36.6ms 42: learn: 0.5895551 total: 196ms remaining: 31.9ms 43: learn: 0.5881662 total: 200ms remaining: 27.3ms 44: learn: 0.5870671 total: 205ms remaining: 22.7ms 45: learn: 0.5862987 total: 209ms remaining: 18.1ms 46: learn: 0.5848223 total: 213ms remaining: 13.6ms 47: learn: 0.5842771 total: 218ms remaining: 9.08ms 48: learn: 0.5835120 total: 222ms remaining: 4.53ms 49: learn: 0.5826993 total: 226ms remaining: 0us 0: learn: 0.6843003 total: 5.04ms remaining: 247ms 1: learn: 0.6772806 total: 10.2ms remaining: 246ms 2: learn: 0.6712385 total: 15.1ms remaining: 237ms 3: learn: 0.6665583 total: 20.2ms remaining: 233ms 4: learn: 0.6612645 total: 24.8ms remaining: 223ms 5: learn: 0.6574435 total: 29.4ms remaining: 215ms 6: learn: 0.6541627 total: 34.2ms remaining: 210ms 7: learn: 0.6506563 total: 39ms remaining: 205ms 8: learn: 0.6474845 total: 43.7ms remaining: 199ms 9: learn: 0.6449232 total: 48.2ms remaining: 193ms 10: learn: 0.6411716 total: 53.1ms remaining: 188ms 11: learn: 0.6386104 total: 57.8ms remaining: 183ms 12: learn: 0.6371543 total: 62.4ms remaining: 178ms 13: learn: 0.6344039 total: 66.9ms remaining: 172ms 14: learn: 0.6323422 total: 71.8ms remaining: 168ms 15: learn: 0.6307121 total: 76.5ms remaining: 162ms 16: learn: 0.6286112 total: 81.6ms remaining: 158ms 17: learn: 0.6263991 total: 86.5ms remaining: 154ms 18: learn: 0.6236455 total: 91.4ms remaining: 149ms 19: learn: 0.6222689 total: 96.2ms remaining: 144ms 20: learn: 0.6201993 total: 101ms remaining: 139ms 21: learn: 0.6184319 total: 105ms remaining: 133ms 22: learn: 0.6169642 total: 109ms remaining: 128ms 23: learn: 0.6151761 total: 113ms remaining: 122ms 24: learn: 0.6133052 total: 117ms remaining: 117ms 25: learn: 0.6114190 total: 121ms remaining: 112ms 26: learn: 0.6102635 total: 126ms remaining: 107ms 27: learn: 0.6088248 total: 130ms remaining: 102ms 28: learn: 0.6076640 total: 134ms remaining: 97.1ms 29: learn: 0.6062278 total: 139ms remaining: 92.3ms 30: learn: 0.6049668 total: 143ms remaining: 87.5ms 31: learn: 0.6040744 total: 147ms remaining: 82.7ms 32: learn: 0.6031347 total: 151ms remaining: 77.9ms 33: learn: 0.6015689 total: 155ms remaining: 73.1ms 34: learn: 0.6009190 total: 159ms remaining: 68.3ms 35: learn: 0.5998143 total: 164ms remaining: 63.6ms 36: learn: 0.5981429 total: 168ms remaining: 59ms 37: learn: 0.5973589 total: 172ms remaining: 54.3ms 38: learn: 0.5968114 total: 176ms remaining: 49.7ms 39: learn: 0.5954514 total: 181ms remaining: 45.2ms 40: learn: 0.5947214 total: 185ms remaining: 40.7ms 41: learn: 0.5941441 total: 190ms remaining: 36.1ms 42: learn: 0.5933506 total: 194ms remaining: 31.6ms 43: learn: 0.5920626 total: 198ms remaining: 27.1ms 44: learn: 0.5904508 total: 203ms remaining: 22.5ms 45: learn: 0.5890399 total: 207ms remaining: 18ms 46: learn: 0.5882163 total: 211ms remaining: 13.5ms 47: learn: 0.5863513 total: 216ms remaining: 8.98ms 48: learn: 0.5857402 total: 220ms remaining: 4.48ms 49: learn: 0.5839189 total: 224ms remaining: 0us 0: learn: 0.6922439 total: 4.88ms remaining: 43.9ms 1: learn: 0.6912791 total: 10.2ms remaining: 40.8ms 2: learn: 0.6904271 total: 14.9ms remaining: 34.8ms 3: learn: 0.6895976 total: 19.9ms remaining: 29.8ms 4: learn: 0.6887498 total: 24.6ms remaining: 24.6ms 5: learn: 0.6879243 total: 29ms remaining: 19.3ms 6: learn: 0.6870282 total: 33.9ms remaining: 14.5ms 7: learn: 0.6862114 total: 38.8ms remaining: 9.71ms 8: learn: 0.6853793 total: 43.5ms remaining: 4.83ms 9: learn: 0.6846136 total: 48.4ms remaining: 0us 0: learn: 0.6921684 total: 4.94ms remaining: 44.5ms 1: learn: 0.6912253 total: 10ms remaining: 40.1ms 2: learn: 0.6903892 total: 15ms remaining: 34.9ms 3: learn: 0.6895426 total: 19.8ms remaining: 29.7ms 4: learn: 0.6885473 total: 24.5ms remaining: 24.5ms 5: learn: 0.6876889 total: 29.3ms remaining: 19.5ms 6: learn: 0.6868115 total: 34.1ms remaining: 14.6ms 7: learn: 0.6859590 total: 38.9ms remaining: 9.73ms 8: learn: 0.6851790 total: 43.7ms remaining: 4.86ms 9: learn: 0.6844368 total: 48.4ms remaining: 0us 0: learn: 0.6921915 total: 4.86ms remaining: 43.8ms 1: learn: 0.6912218 total: 10.2ms remaining: 40.8ms 2: learn: 0.6903686 total: 15.3ms remaining: 35.7ms 3: learn: 0.6895449 total: 20ms remaining: 30ms 4: learn: 0.6885902 total: 24.7ms remaining: 24.7ms 5: learn: 0.6877352 total: 29.3ms remaining: 19.5ms 6: learn: 0.6868474 total: 33.9ms remaining: 14.5ms 7: learn: 0.6859982 total: 38.5ms remaining: 9.63ms 8: learn: 0.6852113 total: 43.3ms remaining: 4.82ms 9: learn: 0.6844197 total: 47.8ms remaining: 0us 0: learn: 0.6922095 total: 4.77ms remaining: 42.9ms 1: learn: 0.6913359 total: 9.91ms remaining: 39.6ms 2: learn: 0.6904863 total: 14.6ms remaining: 34ms 3: learn: 0.6896036 total: 19.4ms remaining: 29.1ms 4: learn: 0.6886344 total: 23.9ms remaining: 23.9ms 5: learn: 0.6877722 total: 28.7ms remaining: 19.1ms 6: learn: 0.6869225 total: 33.5ms remaining: 14.4ms 7: learn: 0.6860490 total: 38.3ms remaining: 9.57ms 8: learn: 0.6852582 total: 43ms remaining: 4.77ms 9: learn: 0.6845180 total: 47.8ms remaining: 0us 0: learn: 0.6921339 total: 5.13ms remaining: 46.2ms 1: learn: 0.6912187 total: 10.4ms remaining: 41.5ms 2: learn: 0.6903531 total: 15.4ms remaining: 35.9ms 3: learn: 0.6895050 total: 20.2ms remaining: 30.3ms 4: learn: 0.6885765 total: 25ms remaining: 25ms 5: learn: 0.6877394 total: 29.7ms remaining: 19.8ms 6: learn: 0.6869272 total: 34.3ms remaining: 14.7ms 7: learn: 0.6860480 total: 39.4ms remaining: 9.85ms 8: learn: 0.6852818 total: 44ms remaining: 4.89ms 9: learn: 0.6845411 total: 48.8ms remaining: 0us 0: learn: 0.6922128 total: 4.91ms remaining: 44.2ms 1: learn: 0.6912299 total: 10.2ms remaining: 40.8ms 2: learn: 0.6903811 total: 14.8ms remaining: 34.5ms 3: learn: 0.6894953 total: 19.5ms remaining: 29.2ms 4: learn: 0.6885255 total: 24.2ms remaining: 24.2ms 5: learn: 0.6876646 total: 29ms remaining: 19.3ms 6: learn: 0.6868438 total: 33.8ms remaining: 14.5ms 7: learn: 0.6860070 total: 38.5ms remaining: 9.64ms 8: learn: 0.6852140 total: 43.2ms remaining: 4.8ms 9: learn: 0.6844072 total: 47.7ms remaining: 0us 0: learn: 0.6921310 total: 4.92ms remaining: 44.3ms 1: learn: 0.6912397 total: 10.1ms remaining: 40.5ms 2: learn: 0.6903965 total: 15.1ms remaining: 35.3ms 3: learn: 0.6896206 total: 20.2ms remaining: 30.4ms 4: learn: 0.6887421 total: 24.8ms remaining: 24.8ms 5: learn: 0.6878662 total: 29.6ms remaining: 19.7ms 6: learn: 0.6870295 total: 34.3ms remaining: 14.7ms 7: learn: 0.6861785 total: 39.1ms remaining: 9.78ms 8: learn: 0.6853700 total: 43.8ms remaining: 4.87ms 9: learn: 0.6846076 total: 48.5ms remaining: 0us 0: learn: 0.6921470 total: 5.04ms remaining: 45.3ms 1: learn: 0.6912489 total: 10.2ms remaining: 40.8ms 2: learn: 0.6903650 total: 14.9ms remaining: 34.9ms 3: learn: 0.6894674 total: 19.6ms remaining: 29.4ms 4: learn: 0.6885049 total: 24.1ms remaining: 24.1ms 5: learn: 0.6876261 total: 29.1ms remaining: 19.4ms 6: learn: 0.6867983 total: 33.7ms remaining: 14.4ms 7: learn: 0.6859387 total: 38.6ms remaining: 9.64ms 8: learn: 0.6851703 total: 43.2ms remaining: 4.8ms 9: learn: 0.6843971 total: 47.7ms remaining: 0us 0: learn: 0.6921715 total: 4.88ms remaining: 44ms 1: learn: 0.6912816 total: 10.2ms remaining: 40.8ms 2: learn: 0.6904107 total: 15ms remaining: 35.1ms 3: learn: 0.6895320 total: 19.9ms remaining: 29.9ms 4: learn: 0.6886191 total: 24.7ms remaining: 24.7ms 5: learn: 0.6877583 total: 29.3ms remaining: 19.5ms 6: learn: 0.6869483 total: 34.1ms remaining: 14.6ms 7: learn: 0.6860728 total: 38.8ms remaining: 9.71ms 8: learn: 0.6852892 total: 43.5ms remaining: 4.83ms 9: learn: 0.6845102 total: 48ms remaining: 0us 0: learn: 0.6922217 total: 4.84ms remaining: 43.5ms 1: learn: 0.6913853 total: 9.88ms remaining: 39.5ms 2: learn: 0.6905140 total: 14.7ms remaining: 34.2ms 3: learn: 0.6897418 total: 19.5ms remaining: 29.3ms 4: learn: 0.6887937 total: 24.4ms remaining: 24.4ms 5: learn: 0.6879609 total: 29.4ms remaining: 19.6ms 6: learn: 0.6871608 total: 34.8ms remaining: 14.9ms 7: learn: 0.6863368 total: 39.5ms remaining: 9.87ms 8: learn: 0.6855830 total: 44ms remaining: 4.89ms 9: learn: 0.6848045 total: 48.7ms remaining: 0us 0: learn: 0.6922439 total: 4.91ms remaining: 118ms 1: learn: 0.6912791 total: 10.1ms remaining: 116ms 2: learn: 0.6904271 total: 15ms remaining: 110ms 3: learn: 0.6895976 total: 19.6ms remaining: 103ms 4: learn: 0.6887498 total: 24.2ms remaining: 97ms 5: learn: 0.6879243 total: 28.9ms remaining: 91.5ms 6: learn: 0.6870282 total: 33.8ms remaining: 87ms 7: learn: 0.6862114 total: 38.7ms remaining: 82.1ms 8: learn: 0.6853793 total: 43.5ms remaining: 77.4ms 9: learn: 0.6846136 total: 48.3ms remaining: 72.5ms 10: learn: 0.6838231 total: 53ms remaining: 67.4ms 11: learn: 0.6830346 total: 57.5ms remaining: 62.2ms 12: learn: 0.6822582 total: 62.3ms remaining: 57.5ms 13: learn: 0.6815971 total: 67.1ms remaining: 52.7ms 14: learn: 0.6808392 total: 71.5ms remaining: 47.7ms 15: learn: 0.6801840 total: 76.2ms remaining: 42.8ms 16: learn: 0.6795233 total: 80.8ms remaining: 38ms 17: learn: 0.6788068 total: 85.5ms remaining: 33.3ms 18: learn: 0.6780775 total: 90.3ms remaining: 28.5ms 19: learn: 0.6773762 total: 95.1ms remaining: 23.8ms 20: learn: 0.6767597 total: 99.7ms remaining: 19ms 21: learn: 0.6760314 total: 104ms remaining: 14.2ms 22: learn: 0.6754202 total: 109ms remaining: 9.44ms 23: learn: 0.6748241 total: 113ms remaining: 4.7ms 24: learn: 0.6741729 total: 117ms remaining: 0us 0: learn: 0.6921684 total: 4.72ms remaining: 113ms 1: learn: 0.6912253 total: 9.82ms remaining: 113ms 2: learn: 0.6903892 total: 14.6ms remaining: 107ms 3: learn: 0.6895426 total: 19.5ms remaining: 102ms 4: learn: 0.6885473 total: 24.2ms remaining: 96.7ms 5: learn: 0.6876889 total: 28.8ms remaining: 91.2ms 6: learn: 0.6868115 total: 33.4ms remaining: 85.8ms 7: learn: 0.6859590 total: 38.2ms remaining: 81.2ms 8: learn: 0.6851790 total: 42.9ms remaining: 76.2ms 9: learn: 0.6844368 total: 47.6ms remaining: 71.3ms 10: learn: 0.6836522 total: 52.3ms remaining: 66.5ms 11: learn: 0.6828691 total: 57.1ms remaining: 61.8ms 12: learn: 0.6821469 total: 61.9ms remaining: 57.1ms 13: learn: 0.6814688 total: 66.7ms remaining: 52.4ms 14: learn: 0.6806998 total: 71.5ms remaining: 47.7ms 15: learn: 0.6800428 total: 77.1ms remaining: 43.4ms 16: learn: 0.6793789 total: 82ms remaining: 38.6ms 17: learn: 0.6786113 total: 86.7ms remaining: 33.7ms 18: learn: 0.6778886 total: 91.3ms remaining: 28.8ms 19: learn: 0.6771615 total: 95.5ms remaining: 23.9ms 20: learn: 0.6765337 total: 99.6ms remaining: 19ms 21: learn: 0.6759078 total: 104ms remaining: 14.2ms 22: learn: 0.6753461 total: 108ms remaining: 9.39ms 23: learn: 0.6747445 total: 112ms remaining: 4.67ms 24: learn: 0.6740925 total: 116ms remaining: 0us 0: learn: 0.6921915 total: 5ms remaining: 120ms 1: learn: 0.6912218 total: 10.2ms remaining: 117ms 2: learn: 0.6903686 total: 15ms remaining: 110ms 3: learn: 0.6895449 total: 19.7ms remaining: 104ms 4: learn: 0.6885902 total: 24.4ms remaining: 97.8ms 5: learn: 0.6877352 total: 29.3ms remaining: 92.8ms 6: learn: 0.6868474 total: 34.1ms remaining: 87.6ms 7: learn: 0.6859982 total: 38.8ms remaining: 82.5ms 8: learn: 0.6852113 total: 43.6ms remaining: 77.5ms 9: learn: 0.6844197 total: 48.4ms remaining: 72.6ms 10: learn: 0.6836805 total: 53ms remaining: 67.4ms 11: learn: 0.6829227 total: 57.6ms remaining: 62.4ms 12: learn: 0.6822036 total: 62.4ms remaining: 57.6ms 13: learn: 0.6814596 total: 67.4ms remaining: 52.9ms 14: learn: 0.6806853 total: 72.2ms remaining: 48.1ms 15: learn: 0.6800364 total: 77.1ms remaining: 43.4ms 16: learn: 0.6794000 total: 81.8ms remaining: 38.5ms 17: learn: 0.6785859 total: 86.6ms remaining: 33.7ms 18: learn: 0.6778708 total: 91.3ms remaining: 28.8ms 19: learn: 0.6771354 total: 95.7ms remaining: 23.9ms 20: learn: 0.6765014 total: 100ms remaining: 19ms 21: learn: 0.6757977 total: 105ms remaining: 14.3ms 22: learn: 0.6751375 total: 109ms remaining: 9.49ms 23: learn: 0.6744798 total: 113ms remaining: 4.71ms 24: learn: 0.6738306 total: 117ms remaining: 0us 0: learn: 0.6922095 total: 4.96ms remaining: 119ms 1: learn: 0.6913359 total: 10.2ms remaining: 117ms 2: learn: 0.6904863 total: 15.1ms remaining: 111ms 3: learn: 0.6896036 total: 19.8ms remaining: 104ms 4: learn: 0.6886344 total: 24.5ms remaining: 98.1ms 5: learn: 0.6877722 total: 29.2ms remaining: 92.5ms 6: learn: 0.6869225 total: 33.9ms remaining: 87.2ms 7: learn: 0.6860490 total: 38.7ms remaining: 82.3ms 8: learn: 0.6852582 total: 43.6ms remaining: 77.4ms 9: learn: 0.6845180 total: 48.1ms remaining: 72.2ms 10: learn: 0.6837038 total: 52.9ms remaining: 67.3ms 11: learn: 0.6829396 total: 57.4ms remaining: 62.2ms 12: learn: 0.6821706 total: 62ms remaining: 57.2ms 13: learn: 0.6813913 total: 66.6ms remaining: 52.4ms 14: learn: 0.6806357 total: 71.8ms remaining: 47.9ms 15: learn: 0.6799482 total: 76.8ms remaining: 43.2ms 16: learn: 0.6793076 total: 81.6ms remaining: 38.4ms 17: learn: 0.6786010 total: 86.5ms remaining: 33.6ms 18: learn: 0.6778735 total: 91.3ms remaining: 28.8ms 19: learn: 0.6771448 total: 95.7ms remaining: 23.9ms 20: learn: 0.6764868 total: 99.9ms remaining: 19ms 21: learn: 0.6757550 total: 104ms remaining: 14.2ms 22: learn: 0.6750211 total: 108ms remaining: 9.43ms 23: learn: 0.6742810 total: 113ms remaining: 4.69ms 24: learn: 0.6736197 total: 117ms remaining: 0us 0: learn: 0.6921339 total: 4.88ms remaining: 117ms 1: learn: 0.6912187 total: 10.2ms remaining: 117ms 2: learn: 0.6903531 total: 15.1ms remaining: 111ms 3: learn: 0.6895050 total: 20ms remaining: 105ms 4: learn: 0.6885765 total: 25ms remaining: 100ms 5: learn: 0.6877394 total: 30ms remaining: 95ms 6: learn: 0.6869272 total: 34.9ms remaining: 89.8ms 7: learn: 0.6860480 total: 39.9ms remaining: 84.8ms 8: learn: 0.6852818 total: 44.5ms remaining: 79.2ms 9: learn: 0.6845411 total: 49.1ms remaining: 73.6ms 10: learn: 0.6837185 total: 54ms remaining: 68.7ms 11: learn: 0.6829477 total: 58.7ms remaining: 63.6ms 12: learn: 0.6822440 total: 63.3ms remaining: 58.4ms 13: learn: 0.6815716 total: 67.9ms remaining: 53.3ms 14: learn: 0.6807578 total: 72.6ms remaining: 48.4ms 15: learn: 0.6801071 total: 77.2ms remaining: 43.4ms 16: learn: 0.6794542 total: 81.9ms remaining: 38.5ms 17: learn: 0.6787546 total: 86.8ms remaining: 33.8ms 18: learn: 0.6780793 total: 91.6ms remaining: 28.9ms 19: learn: 0.6774017 total: 96.4ms remaining: 24.1ms 20: learn: 0.6767758 total: 101ms remaining: 19.2ms 21: learn: 0.6761543 total: 105ms remaining: 14.3ms 22: learn: 0.6754610 total: 109ms remaining: 9.5ms 23: learn: 0.6747966 total: 113ms remaining: 4.72ms 24: learn: 0.6741518 total: 117ms remaining: 0us 0: learn: 0.6922128 total: 4.77ms remaining: 114ms 1: learn: 0.6912299 total: 9.98ms remaining: 115ms 2: learn: 0.6903811 total: 14.7ms remaining: 108ms 3: learn: 0.6894953 total: 19.4ms remaining: 102ms 4: learn: 0.6885255 total: 24ms remaining: 96ms 5: learn: 0.6876646 total: 28.8ms remaining: 91.3ms 6: learn: 0.6868438 total: 33.8ms remaining: 86.8ms 7: learn: 0.6860070 total: 38.7ms remaining: 82.2ms 8: learn: 0.6852140 total: 43.3ms remaining: 77ms 9: learn: 0.6844072 total: 47.9ms remaining: 71.8ms 10: learn: 0.6836543 total: 52.6ms remaining: 67ms 11: learn: 0.6828847 total: 57.2ms remaining: 61.9ms 12: learn: 0.6821372 total: 62ms remaining: 57.3ms 13: learn: 0.6813281 total: 66.9ms remaining: 52.6ms 14: learn: 0.6805638 total: 72ms remaining: 48ms 15: learn: 0.6798995 total: 76.7ms remaining: 43.1ms 16: learn: 0.6792429 total: 81.3ms remaining: 38.3ms 17: learn: 0.6785070 total: 86ms remaining: 33.5ms 18: learn: 0.6777665 total: 90.7ms remaining: 28.6ms 19: learn: 0.6770392 total: 94.8ms remaining: 23.7ms 20: learn: 0.6763773 total: 99ms remaining: 18.9ms 21: learn: 0.6756642 total: 103ms remaining: 14.1ms 22: learn: 0.6749865 total: 108ms remaining: 9.36ms 23: learn: 0.6743155 total: 112ms remaining: 4.66ms 24: learn: 0.6736512 total: 116ms remaining: 0us 0: learn: 0.6921310 total: 5.11ms remaining: 123ms 1: learn: 0.6912397 total: 10.3ms remaining: 119ms 2: learn: 0.6903965 total: 15.2ms remaining: 111ms 3: learn: 0.6896206 total: 20.1ms remaining: 106ms 4: learn: 0.6887421 total: 25ms remaining: 100ms 5: learn: 0.6878662 total: 29.7ms remaining: 94.2ms 6: learn: 0.6870295 total: 34.6ms remaining: 88.9ms 7: learn: 0.6861785 total: 39.6ms remaining: 84.2ms 8: learn: 0.6853700 total: 44.5ms remaining: 79.2ms 9: learn: 0.6846076 total: 49.2ms remaining: 73.8ms 10: learn: 0.6838417 total: 54ms remaining: 68.8ms 11: learn: 0.6830864 total: 58.8ms remaining: 63.7ms 12: learn: 0.6823174 total: 63.6ms remaining: 58.7ms 13: learn: 0.6815698 total: 68.2ms remaining: 53.6ms 14: learn: 0.6808035 total: 72.8ms remaining: 48.5ms 15: learn: 0.6801315 total: 77.6ms remaining: 43.6ms 16: learn: 0.6793890 total: 82.3ms remaining: 38.7ms 17: learn: 0.6786807 total: 86.7ms remaining: 33.7ms 18: learn: 0.6779780 total: 91.5ms remaining: 28.9ms 19: learn: 0.6772382 total: 96.1ms remaining: 24ms 20: learn: 0.6765820 total: 100ms remaining: 19.1ms 21: learn: 0.6758901 total: 105ms remaining: 14.3ms 22: learn: 0.6752426 total: 109ms remaining: 9.46ms 23: learn: 0.6745617 total: 113ms remaining: 4.7ms 24: learn: 0.6739205 total: 117ms remaining: 0us 0: learn: 0.6921470 total: 5.22ms remaining: 125ms 1: learn: 0.6912489 total: 10.5ms remaining: 121ms 2: learn: 0.6903650 total: 15.3ms remaining: 112ms 3: learn: 0.6894674 total: 20.2ms remaining: 106ms 4: learn: 0.6885049 total: 25.1ms remaining: 100ms 5: learn: 0.6876261 total: 29.8ms remaining: 94.4ms 6: learn: 0.6867983 total: 34.7ms remaining: 89.1ms 7: learn: 0.6859387 total: 39.5ms remaining: 83.8ms 8: learn: 0.6851703 total: 44.2ms remaining: 78.5ms 9: learn: 0.6843971 total: 49.1ms remaining: 73.6ms 10: learn: 0.6836360 total: 53.7ms remaining: 68.3ms 11: learn: 0.6828538 total: 58.1ms remaining: 62.9ms 12: learn: 0.6821209 total: 62.8ms remaining: 58ms 13: learn: 0.6813259 total: 67.8ms remaining: 53.3ms 14: learn: 0.6805428 total: 72.9ms remaining: 48.6ms 15: learn: 0.6798709 total: 77.6ms remaining: 43.7ms 16: learn: 0.6792006 total: 82.4ms remaining: 38.8ms 17: learn: 0.6784006 total: 87.2ms remaining: 33.9ms 18: learn: 0.6776681 total: 92ms remaining: 29ms 19: learn: 0.6769385 total: 96.5ms remaining: 24.1ms 20: learn: 0.6763158 total: 101ms remaining: 19.2ms 21: learn: 0.6756694 total: 105ms remaining: 14.4ms 22: learn: 0.6750333 total: 109ms remaining: 9.51ms 23: learn: 0.6743659 total: 114ms remaining: 4.73ms 24: learn: 0.6737026 total: 118ms remaining: 0us 0: learn: 0.6921715 total: 5.02ms remaining: 121ms 1: learn: 0.6912816 total: 10.2ms remaining: 118ms 2: learn: 0.6904107 total: 15.3ms remaining: 112ms 3: learn: 0.6895320 total: 20.3ms remaining: 107ms 4: learn: 0.6886191 total: 25.1ms remaining: 100ms 5: learn: 0.6877583 total: 29.8ms remaining: 94.3ms 6: learn: 0.6869483 total: 34.5ms remaining: 88.8ms 7: learn: 0.6860728 total: 39.4ms remaining: 83.7ms 8: learn: 0.6852892 total: 44.1ms remaining: 78.4ms 9: learn: 0.6845102 total: 48.8ms remaining: 73.2ms 10: learn: 0.6837364 total: 53.6ms remaining: 68.2ms 11: learn: 0.6829679 total: 58.2ms remaining: 63.1ms 12: learn: 0.6822290 total: 63ms remaining: 58.1ms 13: learn: 0.6814190 total: 67.8ms remaining: 53.3ms 14: learn: 0.6806480 total: 72.6ms remaining: 48.4ms 15: learn: 0.6799887 total: 77.6ms remaining: 43.6ms 16: learn: 0.6793341 total: 82.4ms remaining: 38.8ms 17: learn: 0.6786181 total: 87.2ms remaining: 33.9ms 18: learn: 0.6779427 total: 92.2ms remaining: 29.1ms 19: learn: 0.6772188 total: 96.7ms remaining: 24.2ms 20: learn: 0.6765775 total: 101ms remaining: 19.3ms 21: learn: 0.6759528 total: 106ms remaining: 14.5ms 22: learn: 0.6752654 total: 111ms remaining: 9.63ms 23: learn: 0.6746043 total: 115ms remaining: 4.8ms 24: learn: 0.6739620 total: 119ms remaining: 0us 0: learn: 0.6922217 total: 5.11ms remaining: 123ms 1: learn: 0.6913853 total: 10.3ms remaining: 118ms 2: learn: 0.6905140 total: 15.1ms remaining: 111ms 3: learn: 0.6897418 total: 19.8ms remaining: 104ms 4: learn: 0.6887937 total: 24.6ms remaining: 98.4ms 5: learn: 0.6879609 total: 29.7ms remaining: 94.2ms 6: learn: 0.6871608 total: 34.4ms remaining: 88.5ms 7: learn: 0.6863368 total: 39.1ms remaining: 83.2ms 8: learn: 0.6855830 total: 43.8ms remaining: 77.8ms 9: learn: 0.6848045 total: 48.2ms remaining: 72.3ms 10: learn: 0.6840721 total: 53.2ms remaining: 67.7ms 11: learn: 0.6833443 total: 57.8ms remaining: 62.6ms 12: learn: 0.6826200 total: 62.6ms remaining: 57.8ms 13: learn: 0.6818612 total: 67.4ms remaining: 52.9ms 14: learn: 0.6811520 total: 72.3ms remaining: 48.2ms 15: learn: 0.6805039 total: 77ms remaining: 43.3ms 16: learn: 0.6798648 total: 81.9ms remaining: 38.6ms 17: learn: 0.6792198 total: 86.9ms remaining: 33.8ms 18: learn: 0.6785304 total: 92ms remaining: 29.1ms 19: learn: 0.6778163 total: 96.9ms remaining: 24.2ms 20: learn: 0.6771316 total: 101ms remaining: 19.3ms 21: learn: 0.6764934 total: 106ms remaining: 14.4ms 22: learn: 0.6758187 total: 110ms remaining: 9.56ms 23: learn: 0.6751007 total: 114ms remaining: 4.76ms 24: learn: 0.6744817 total: 118ms remaining: 0us 0: learn: 0.6922439 total: 4.83ms remaining: 237ms 1: learn: 0.6912791 total: 9.94ms remaining: 239ms 2: learn: 0.6904271 total: 14.8ms remaining: 232ms 3: learn: 0.6895976 total: 19.4ms remaining: 223ms 4: learn: 0.6887498 total: 24.1ms remaining: 217ms 5: learn: 0.6879243 total: 28.9ms remaining: 212ms 6: learn: 0.6870282 total: 33.8ms remaining: 207ms 7: learn: 0.6862114 total: 38.6ms remaining: 203ms 8: learn: 0.6853793 total: 43.3ms remaining: 197ms 9: learn: 0.6846136 total: 48ms remaining: 192ms 10: learn: 0.6838231 total: 52.9ms remaining: 188ms 11: learn: 0.6830346 total: 57.5ms remaining: 182ms 12: learn: 0.6822582 total: 65.1ms remaining: 185ms 13: learn: 0.6815971 total: 69.8ms remaining: 179ms 14: learn: 0.6808392 total: 74.6ms remaining: 174ms 15: learn: 0.6801840 total: 79.4ms remaining: 169ms 16: learn: 0.6795233 total: 84ms remaining: 163ms 17: learn: 0.6788068 total: 88.8ms remaining: 158ms 18: learn: 0.6780775 total: 93.3ms remaining: 152ms 19: learn: 0.6773762 total: 97.5ms remaining: 146ms 20: learn: 0.6767597 total: 102ms remaining: 140ms 21: learn: 0.6760314 total: 106ms remaining: 135ms 22: learn: 0.6754202 total: 110ms remaining: 129ms 23: learn: 0.6748241 total: 114ms remaining: 124ms 24: learn: 0.6741729 total: 118ms remaining: 118ms 25: learn: 0.6735459 total: 122ms remaining: 113ms 26: learn: 0.6729397 total: 127ms remaining: 108ms 27: learn: 0.6723007 total: 131ms remaining: 103ms 28: learn: 0.6717124 total: 135ms remaining: 97.9ms 29: learn: 0.6711570 total: 139ms remaining: 92.9ms 30: learn: 0.6706122 total: 144ms remaining: 88ms 31: learn: 0.6700892 total: 148ms remaining: 83.1ms 32: learn: 0.6695233 total: 152ms remaining: 78.3ms 33: learn: 0.6688916 total: 156ms remaining: 73.5ms 34: learn: 0.6683868 total: 160ms remaining: 68.8ms 35: learn: 0.6678167 total: 165ms remaining: 64ms 36: learn: 0.6672551 total: 169ms remaining: 59.3ms 37: learn: 0.6667515 total: 173ms remaining: 54.7ms 38: learn: 0.6662960 total: 177ms remaining: 50ms 39: learn: 0.6657863 total: 182ms remaining: 45.5ms 40: learn: 0.6653171 total: 186ms remaining: 40.9ms 41: learn: 0.6648339 total: 191ms remaining: 36.4ms 42: learn: 0.6643165 total: 196ms remaining: 31.8ms 43: learn: 0.6637275 total: 200ms remaining: 27.3ms 44: learn: 0.6632503 total: 204ms remaining: 22.7ms 45: learn: 0.6627237 total: 208ms remaining: 18.1ms 46: learn: 0.6622761 total: 213ms remaining: 13.6ms 47: learn: 0.6617788 total: 217ms remaining: 9.03ms 48: learn: 0.6613543 total: 221ms remaining: 4.51ms 49: learn: 0.6608922 total: 225ms remaining: 0us 0: learn: 0.6921684 total: 5.06ms remaining: 248ms 1: learn: 0.6912253 total: 10.4ms remaining: 249ms 2: learn: 0.6903892 total: 15.9ms remaining: 249ms 3: learn: 0.6895426 total: 20.4ms remaining: 235ms 4: learn: 0.6885473 total: 25.3ms remaining: 228ms 5: learn: 0.6876889 total: 30.2ms remaining: 221ms 6: learn: 0.6868115 total: 35.1ms remaining: 216ms 7: learn: 0.6859590 total: 39.9ms remaining: 210ms 8: learn: 0.6851790 total: 44.7ms remaining: 204ms 9: learn: 0.6844368 total: 49.4ms remaining: 198ms 10: learn: 0.6836522 total: 54.2ms remaining: 192ms 11: learn: 0.6828691 total: 58.9ms remaining: 187ms 12: learn: 0.6821469 total: 63.9ms remaining: 182ms 13: learn: 0.6814688 total: 68.6ms remaining: 176ms 14: learn: 0.6806998 total: 73.5ms remaining: 171ms 15: learn: 0.6800428 total: 78.4ms remaining: 167ms 16: learn: 0.6793789 total: 83.3ms remaining: 162ms 17: learn: 0.6786113 total: 88.1ms remaining: 157ms 18: learn: 0.6778886 total: 92.9ms remaining: 152ms 19: learn: 0.6771615 total: 97.6ms remaining: 146ms 20: learn: 0.6765337 total: 102ms remaining: 141ms 21: learn: 0.6759078 total: 107ms remaining: 136ms 22: learn: 0.6753461 total: 111ms remaining: 131ms 23: learn: 0.6747445 total: 115ms remaining: 125ms 24: learn: 0.6740925 total: 120ms remaining: 120ms 25: learn: 0.6734870 total: 124ms remaining: 115ms 26: learn: 0.6728823 total: 128ms remaining: 109ms 27: learn: 0.6722531 total: 132ms remaining: 104ms 28: learn: 0.6716637 total: 137ms remaining: 98.9ms 29: learn: 0.6711269 total: 141ms remaining: 93.8ms 30: learn: 0.6705661 total: 145ms remaining: 88.7ms 31: learn: 0.6700228 total: 149ms remaining: 83.8ms 32: learn: 0.6693697 total: 153ms remaining: 79ms 33: learn: 0.6687695 total: 158ms remaining: 74.2ms 34: learn: 0.6682751 total: 162ms remaining: 69.4ms 35: learn: 0.6677339 total: 166ms remaining: 64.7ms 36: learn: 0.6671709 total: 171ms remaining: 60ms 37: learn: 0.6666636 total: 175ms remaining: 55.3ms 38: learn: 0.6661483 total: 180ms remaining: 50.6ms 39: learn: 0.6656811 total: 184ms remaining: 45.9ms 40: learn: 0.6652170 total: 189ms remaining: 41.4ms 41: learn: 0.6647262 total: 193ms remaining: 36.8ms 42: learn: 0.6642602 total: 197ms remaining: 32.1ms 43: learn: 0.6636529 total: 202ms remaining: 27.5ms 44: learn: 0.6631906 total: 206ms remaining: 22.9ms 45: learn: 0.6627530 total: 210ms remaining: 18.3ms 46: learn: 0.6623231 total: 214ms remaining: 13.7ms 47: learn: 0.6618649 total: 219ms remaining: 9.11ms 48: learn: 0.6614444 total: 223ms remaining: 4.55ms 49: learn: 0.6610440 total: 227ms remaining: 0us 0: learn: 0.6921915 total: 4.94ms remaining: 242ms 1: learn: 0.6912218 total: 10.1ms remaining: 244ms 2: learn: 0.6903686 total: 15ms remaining: 234ms 3: learn: 0.6895449 total: 19.7ms remaining: 227ms 4: learn: 0.6885902 total: 24.4ms remaining: 220ms 5: learn: 0.6877352 total: 29.4ms remaining: 215ms 6: learn: 0.6868474 total: 34.4ms remaining: 211ms 7: learn: 0.6859982 total: 39ms remaining: 205ms 8: learn: 0.6852113 total: 43.7ms remaining: 199ms 9: learn: 0.6844197 total: 48.3ms remaining: 193ms 10: learn: 0.6836805 total: 53ms remaining: 188ms 11: learn: 0.6829227 total: 57.5ms remaining: 182ms 12: learn: 0.6822036 total: 62.5ms remaining: 178ms 13: learn: 0.6814596 total: 67.1ms remaining: 173ms 14: learn: 0.6806853 total: 71.8ms remaining: 168ms 15: learn: 0.6800364 total: 76.6ms remaining: 163ms 16: learn: 0.6794000 total: 81.3ms remaining: 158ms 17: learn: 0.6785859 total: 86.1ms remaining: 153ms 18: learn: 0.6778708 total: 91ms remaining: 148ms 19: learn: 0.6771354 total: 95.5ms remaining: 143ms 20: learn: 0.6765014 total: 100ms remaining: 138ms 21: learn: 0.6757977 total: 105ms remaining: 133ms 22: learn: 0.6751375 total: 109ms remaining: 128ms 23: learn: 0.6744798 total: 113ms remaining: 123ms 24: learn: 0.6738306 total: 118ms remaining: 118ms 25: learn: 0.6732398 total: 122ms remaining: 113ms 26: learn: 0.6726372 total: 127ms remaining: 108ms 27: learn: 0.6720409 total: 131ms remaining: 103ms 28: learn: 0.6714244 total: 135ms remaining: 98ms 29: learn: 0.6708700 total: 139ms remaining: 93ms 30: learn: 0.6703568 total: 144ms remaining: 88.1ms 31: learn: 0.6697886 total: 148ms remaining: 83.3ms 32: learn: 0.6691506 total: 152ms remaining: 78.5ms 33: learn: 0.6686153 total: 156ms remaining: 73.6ms 34: learn: 0.6681226 total: 161ms remaining: 68.8ms 35: learn: 0.6675948 total: 165ms remaining: 64.1ms 36: learn: 0.6670620 total: 169ms remaining: 59.4ms 37: learn: 0.6665548 total: 173ms remaining: 54.7ms 38: learn: 0.6660335 total: 178ms remaining: 50.2ms 39: learn: 0.6655698 total: 182ms remaining: 45.6ms 40: learn: 0.6650961 total: 187ms remaining: 41ms 41: learn: 0.6646009 total: 191ms remaining: 36.5ms 42: learn: 0.6641496 total: 196ms remaining: 31.9ms 43: learn: 0.6635837 total: 200ms remaining: 27.3ms 44: learn: 0.6631225 total: 204ms remaining: 22.7ms 45: learn: 0.6626734 total: 209ms remaining: 18.1ms 46: learn: 0.6622226 total: 213ms remaining: 13.6ms 47: learn: 0.6617659 total: 217ms remaining: 9.05ms 48: learn: 0.6612847 total: 221ms remaining: 4.52ms 49: learn: 0.6608828 total: 226ms remaining: 0us 0: learn: 0.6922095 total: 4.96ms remaining: 243ms 1: learn: 0.6913359 total: 10.2ms remaining: 245ms 2: learn: 0.6904863 total: 15ms remaining: 234ms 3: learn: 0.6896036 total: 19.9ms remaining: 229ms 4: learn: 0.6886344 total: 24.7ms remaining: 223ms 5: learn: 0.6877722 total: 29.5ms remaining: 216ms 6: learn: 0.6869225 total: 34.1ms remaining: 209ms 7: learn: 0.6860490 total: 38.8ms remaining: 204ms 8: learn: 0.6852582 total: 43.6ms remaining: 199ms 9: learn: 0.6845180 total: 48.6ms remaining: 194ms 10: learn: 0.6837038 total: 53.2ms remaining: 189ms 11: learn: 0.6829396 total: 57.9ms remaining: 183ms 12: learn: 0.6821706 total: 63ms remaining: 179ms 13: learn: 0.6813913 total: 67.9ms remaining: 175ms 14: learn: 0.6806357 total: 72.6ms remaining: 169ms 15: learn: 0.6799482 total: 77.4ms remaining: 165ms 16: learn: 0.6793076 total: 82ms remaining: 159ms 17: learn: 0.6786010 total: 86.5ms remaining: 154ms 18: learn: 0.6778735 total: 91.1ms remaining: 149ms 19: learn: 0.6771448 total: 95.6ms remaining: 143ms 20: learn: 0.6764868 total: 100ms remaining: 138ms 21: learn: 0.6757550 total: 105ms remaining: 133ms 22: learn: 0.6750211 total: 109ms remaining: 128ms 23: learn: 0.6742810 total: 113ms remaining: 123ms 24: learn: 0.6736197 total: 117ms remaining: 117ms 25: learn: 0.6729620 total: 122ms remaining: 112ms 26: learn: 0.6723543 total: 126ms remaining: 107ms 27: learn: 0.6717461 total: 131ms remaining: 103ms 28: learn: 0.6711394 total: 135ms remaining: 97.5ms 29: learn: 0.6705920 total: 139ms remaining: 92.6ms 30: learn: 0.6700690 total: 143ms remaining: 87.7ms 31: learn: 0.6695283 total: 147ms remaining: 82.7ms 32: learn: 0.6688699 total: 151ms remaining: 78ms 33: learn: 0.6683015 total: 156ms remaining: 73.2ms 34: learn: 0.6678060 total: 160ms remaining: 68.5ms 35: learn: 0.6672920 total: 164ms remaining: 63.8ms 36: learn: 0.6667459 total: 168ms remaining: 59.1ms 37: learn: 0.6662301 total: 172ms remaining: 54.4ms 38: learn: 0.6656917 total: 176ms remaining: 49.8ms 39: learn: 0.6652031 total: 181ms remaining: 45.4ms 40: learn: 0.6647530 total: 186ms remaining: 40.9ms 41: learn: 0.6642591 total: 191ms remaining: 36.4ms 42: learn: 0.6637344 total: 196ms remaining: 31.9ms 43: learn: 0.6631644 total: 201ms remaining: 27.4ms 44: learn: 0.6627135 total: 206ms remaining: 22.8ms 45: learn: 0.6622633 total: 210ms remaining: 18.2ms 46: learn: 0.6617828 total: 214ms remaining: 13.6ms 47: learn: 0.6613083 total: 218ms remaining: 9.09ms 48: learn: 0.6609234 total: 222ms remaining: 4.54ms 49: learn: 0.6605122 total: 227ms remaining: 0us 0: learn: 0.6921339 total: 5.31ms remaining: 260ms 1: learn: 0.6912187 total: 10.6ms remaining: 253ms 2: learn: 0.6903531 total: 15.4ms remaining: 242ms 3: learn: 0.6895050 total: 19.9ms remaining: 228ms 4: learn: 0.6885765 total: 25ms remaining: 225ms 5: learn: 0.6877394 total: 29.9ms remaining: 219ms 6: learn: 0.6869272 total: 34.7ms remaining: 213ms 7: learn: 0.6860480 total: 39.4ms remaining: 207ms 8: learn: 0.6852818 total: 44ms remaining: 200ms 9: learn: 0.6845411 total: 48.5ms remaining: 194ms 10: learn: 0.6837185 total: 53.4ms remaining: 189ms 11: learn: 0.6829477 total: 58ms remaining: 184ms 12: learn: 0.6822440 total: 62.7ms remaining: 179ms 13: learn: 0.6815716 total: 67.6ms remaining: 174ms 14: learn: 0.6807578 total: 72.2ms remaining: 168ms 15: learn: 0.6801071 total: 77.2ms remaining: 164ms 16: learn: 0.6794542 total: 82ms remaining: 159ms 17: learn: 0.6787546 total: 87.1ms remaining: 155ms 18: learn: 0.6780793 total: 91.9ms remaining: 150ms 19: learn: 0.6774017 total: 96.7ms remaining: 145ms 20: learn: 0.6767758 total: 101ms remaining: 139ms 21: learn: 0.6761543 total: 106ms remaining: 134ms 22: learn: 0.6754610 total: 110ms remaining: 129ms 23: learn: 0.6747966 total: 114ms remaining: 124ms 24: learn: 0.6741518 total: 119ms remaining: 119ms 25: learn: 0.6734567 total: 123ms remaining: 113ms 26: learn: 0.6728458 total: 127ms remaining: 109ms 27: learn: 0.6722219 total: 132ms remaining: 103ms 28: learn: 0.6716340 total: 136ms remaining: 98.5ms 29: learn: 0.6710855 total: 140ms remaining: 93.5ms 30: learn: 0.6704961 total: 145ms remaining: 88.6ms 31: learn: 0.6699639 total: 149ms remaining: 83.8ms 32: learn: 0.6693118 total: 153ms remaining: 78.9ms 33: learn: 0.6687480 total: 157ms remaining: 74ms 34: learn: 0.6682496 total: 161ms remaining: 69.2ms 35: learn: 0.6677075 total: 166ms remaining: 64.4ms 36: learn: 0.6671413 total: 170ms remaining: 59.7ms 37: learn: 0.6666271 total: 174ms remaining: 55ms 38: learn: 0.6660989 total: 178ms remaining: 50.3ms 39: learn: 0.6656477 total: 182ms remaining: 45.6ms 40: learn: 0.6651708 total: 187ms remaining: 41.1ms 41: learn: 0.6646786 total: 192ms remaining: 36.5ms 42: learn: 0.6642182 total: 197ms remaining: 32ms 43: learn: 0.6636348 total: 201ms remaining: 27.4ms 44: learn: 0.6631428 total: 205ms remaining: 22.8ms 45: learn: 0.6626796 total: 209ms remaining: 18.2ms 46: learn: 0.6622442 total: 213ms remaining: 13.6ms 47: learn: 0.6617816 total: 218ms remaining: 9.07ms 48: learn: 0.6613199 total: 222ms remaining: 4.52ms 49: learn: 0.6609064 total: 226ms remaining: 0us 0: learn: 0.6922128 total: 4.89ms remaining: 240ms 1: learn: 0.6912299 total: 9.87ms remaining: 237ms 2: learn: 0.6903811 total: 14.6ms remaining: 229ms 3: learn: 0.6894953 total: 19.2ms remaining: 220ms 4: learn: 0.6885255 total: 23.7ms remaining: 213ms 5: learn: 0.6876646 total: 28.7ms remaining: 210ms 6: learn: 0.6868438 total: 33.6ms remaining: 206ms 7: learn: 0.6860070 total: 38.3ms remaining: 201ms 8: learn: 0.6852140 total: 43.2ms remaining: 197ms 9: learn: 0.6844072 total: 48.1ms remaining: 192ms 10: learn: 0.6836543 total: 52.8ms remaining: 187ms 11: learn: 0.6828847 total: 57.3ms remaining: 181ms 12: learn: 0.6821372 total: 62ms remaining: 176ms 13: learn: 0.6813281 total: 66.7ms remaining: 172ms 14: learn: 0.6805638 total: 71.5ms remaining: 167ms 15: learn: 0.6798995 total: 76.2ms remaining: 162ms 16: learn: 0.6792429 total: 80.8ms remaining: 157ms 17: learn: 0.6785070 total: 85.4ms remaining: 152ms 18: learn: 0.6777665 total: 89.9ms remaining: 147ms 19: learn: 0.6770392 total: 94.6ms remaining: 142ms 20: learn: 0.6763773 total: 98.9ms remaining: 137ms 21: learn: 0.6756642 total: 104ms remaining: 132ms 22: learn: 0.6749865 total: 108ms remaining: 127ms 23: learn: 0.6743155 total: 112ms remaining: 122ms 24: learn: 0.6736512 total: 117ms remaining: 117ms 25: learn: 0.6730438 total: 121ms remaining: 112ms 26: learn: 0.6724381 total: 126ms remaining: 107ms 27: learn: 0.6718160 total: 130ms remaining: 102ms 28: learn: 0.6711705 total: 134ms remaining: 97.3ms 29: learn: 0.6706165 total: 139ms remaining: 92.5ms 30: learn: 0.6700294 total: 143ms remaining: 87.7ms 31: learn: 0.6694833 total: 147ms remaining: 82.9ms 32: learn: 0.6688254 total: 152ms remaining: 78.2ms 33: learn: 0.6682733 total: 156ms remaining: 73.5ms 34: learn: 0.6677738 total: 160ms remaining: 68.8ms 35: learn: 0.6672576 total: 165ms remaining: 64.1ms 36: learn: 0.6667161 total: 169ms remaining: 59.4ms 37: learn: 0.6661915 total: 173ms remaining: 54.8ms 38: learn: 0.6656646 total: 178ms remaining: 50.1ms 39: learn: 0.6652114 total: 182ms remaining: 45.6ms 40: learn: 0.6647327 total: 187ms remaining: 41ms 41: learn: 0.6642357 total: 191ms remaining: 36.5ms 42: learn: 0.6637735 total: 196ms remaining: 31.9ms 43: learn: 0.6632008 total: 200ms remaining: 27.3ms 44: learn: 0.6627405 total: 204ms remaining: 22.7ms 45: learn: 0.6623154 total: 209ms remaining: 18.1ms 46: learn: 0.6618164 total: 213ms remaining: 13.6ms 47: learn: 0.6613367 total: 217ms remaining: 9.03ms 48: learn: 0.6608526 total: 221ms remaining: 4.51ms 49: learn: 0.6604459 total: 225ms remaining: 0us 0: learn: 0.6921310 total: 5.04ms remaining: 247ms 1: learn: 0.6912397 total: 10ms remaining: 241ms 2: learn: 0.6903965 total: 14.9ms remaining: 233ms 3: learn: 0.6896206 total: 19.6ms remaining: 226ms 4: learn: 0.6887421 total: 24.5ms remaining: 220ms 5: learn: 0.6878662 total: 29.3ms remaining: 215ms 6: learn: 0.6870295 total: 34ms remaining: 209ms 7: learn: 0.6861785 total: 38.8ms remaining: 204ms 8: learn: 0.6853700 total: 43.4ms remaining: 198ms 9: learn: 0.6846076 total: 48ms remaining: 192ms 10: learn: 0.6838417 total: 52.8ms remaining: 187ms 11: learn: 0.6830864 total: 57.2ms remaining: 181ms 12: learn: 0.6823174 total: 62.1ms remaining: 177ms 13: learn: 0.6815698 total: 66.8ms remaining: 172ms 14: learn: 0.6808035 total: 71.5ms remaining: 167ms 15: learn: 0.6801315 total: 76.3ms remaining: 162ms 16: learn: 0.6793890 total: 81ms remaining: 157ms 17: learn: 0.6786807 total: 85.8ms remaining: 153ms 18: learn: 0.6779780 total: 90.7ms remaining: 148ms 19: learn: 0.6772382 total: 95.5ms remaining: 143ms 20: learn: 0.6765820 total: 99.8ms remaining: 138ms 21: learn: 0.6758901 total: 104ms remaining: 133ms 22: learn: 0.6752426 total: 109ms remaining: 128ms 23: learn: 0.6745617 total: 113ms remaining: 123ms 24: learn: 0.6739205 total: 118ms remaining: 118ms 25: learn: 0.6732927 total: 122ms remaining: 112ms 26: learn: 0.6726284 total: 126ms remaining: 107ms 27: learn: 0.6720154 total: 130ms remaining: 102ms 28: learn: 0.6713608 total: 134ms remaining: 97.2ms 29: learn: 0.6707958 total: 139ms remaining: 92.4ms 30: learn: 0.6702091 total: 143ms remaining: 87.5ms 31: learn: 0.6696944 total: 147ms remaining: 82.7ms 32: learn: 0.6690599 total: 151ms remaining: 77.9ms 33: learn: 0.6684931 total: 155ms remaining: 73.1ms 34: learn: 0.6679823 total: 160ms remaining: 68.4ms 35: learn: 0.6674496 total: 164ms remaining: 63.7ms 36: learn: 0.6669042 total: 168ms remaining: 59ms 37: learn: 0.6663357 total: 172ms remaining: 54.4ms 38: learn: 0.6658012 total: 176ms remaining: 49.7ms 39: learn: 0.6653470 total: 180ms remaining: 45.1ms 40: learn: 0.6648629 total: 185ms remaining: 40.6ms 41: learn: 0.6643450 total: 190ms remaining: 36.1ms 42: learn: 0.6638752 total: 194ms remaining: 31.6ms 43: learn: 0.6632935 total: 199ms remaining: 27.1ms 44: learn: 0.6628004 total: 203ms remaining: 22.5ms 45: learn: 0.6623534 total: 207ms remaining: 18ms 46: learn: 0.6618987 total: 212ms remaining: 13.5ms 47: learn: 0.6614302 total: 216ms remaining: 9ms 48: learn: 0.6609983 total: 220ms remaining: 4.49ms 49: learn: 0.6605296 total: 224ms remaining: 0us 0: learn: 0.6921470 total: 5.07ms remaining: 249ms 1: learn: 0.6912489 total: 10.3ms remaining: 246ms 2: learn: 0.6903650 total: 15.1ms remaining: 237ms 3: learn: 0.6894674 total: 19.9ms remaining: 229ms 4: learn: 0.6885049 total: 24.8ms remaining: 223ms 5: learn: 0.6876261 total: 29.7ms remaining: 218ms 6: learn: 0.6867983 total: 34.4ms remaining: 211ms 7: learn: 0.6859387 total: 39.3ms remaining: 207ms 8: learn: 0.6851703 total: 44.1ms remaining: 201ms 9: learn: 0.6843971 total: 48.9ms remaining: 196ms 10: learn: 0.6836360 total: 53.7ms remaining: 191ms 11: learn: 0.6828538 total: 58.3ms remaining: 185ms 12: learn: 0.6821209 total: 63.2ms remaining: 180ms 13: learn: 0.6813259 total: 68.1ms remaining: 175ms 14: learn: 0.6805428 total: 72.7ms remaining: 170ms 15: learn: 0.6798709 total: 77.7ms remaining: 165ms 16: learn: 0.6792006 total: 82.4ms remaining: 160ms 17: learn: 0.6784006 total: 87.3ms remaining: 155ms 18: learn: 0.6776681 total: 92ms remaining: 150ms 19: learn: 0.6769385 total: 96.6ms remaining: 145ms 20: learn: 0.6763158 total: 101ms remaining: 140ms 21: learn: 0.6756694 total: 106ms remaining: 135ms 22: learn: 0.6750333 total: 110ms remaining: 129ms 23: learn: 0.6743659 total: 114ms remaining: 124ms 24: learn: 0.6737026 total: 119ms remaining: 119ms 25: learn: 0.6729766 total: 123ms remaining: 113ms 26: learn: 0.6723665 total: 127ms remaining: 108ms 27: learn: 0.6717549 total: 131ms remaining: 103ms 28: learn: 0.6711532 total: 135ms remaining: 98.1ms 29: learn: 0.6705638 total: 140ms remaining: 93.1ms 30: learn: 0.6700315 total: 144ms remaining: 88.1ms 31: learn: 0.6694955 total: 148ms remaining: 83.2ms 32: learn: 0.6688384 total: 152ms remaining: 78.4ms 33: learn: 0.6682182 total: 157ms remaining: 73.7ms 34: learn: 0.6677158 total: 161ms remaining: 68.9ms 35: learn: 0.6671632 total: 165ms remaining: 64.1ms 36: learn: 0.6666003 total: 169ms remaining: 59.5ms 37: learn: 0.6660561 total: 174ms remaining: 54.8ms 38: learn: 0.6655261 total: 178ms remaining: 50.2ms 39: learn: 0.6650693 total: 183ms remaining: 45.7ms 40: learn: 0.6645931 total: 187ms remaining: 41.1ms 41: learn: 0.6640800 total: 192ms remaining: 36.5ms 42: learn: 0.6636155 total: 196ms remaining: 31.9ms 43: learn: 0.6630205 total: 200ms remaining: 27.3ms 44: learn: 0.6625727 total: 205ms remaining: 22.8ms 45: learn: 0.6621471 total: 209ms remaining: 18.2ms 46: learn: 0.6617091 total: 214ms remaining: 13.6ms 47: learn: 0.6612352 total: 218ms remaining: 9.07ms 48: learn: 0.6607763 total: 222ms remaining: 4.53ms 49: learn: 0.6603649 total: 226ms remaining: 0us 0: learn: 0.6921715 total: 4.83ms remaining: 236ms 1: learn: 0.6912816 total: 9.98ms remaining: 240ms 2: learn: 0.6904107 total: 14.9ms remaining: 234ms 3: learn: 0.6895320 total: 19.7ms remaining: 227ms 4: learn: 0.6886191 total: 24.5ms remaining: 220ms 5: learn: 0.6877583 total: 29.2ms remaining: 214ms 6: learn: 0.6869483 total: 34ms remaining: 209ms 7: learn: 0.6860728 total: 39ms remaining: 205ms 8: learn: 0.6852892 total: 43.7ms remaining: 199ms 9: learn: 0.6845102 total: 48.1ms remaining: 192ms 10: learn: 0.6837364 total: 52.8ms remaining: 187ms 11: learn: 0.6829679 total: 57.7ms remaining: 183ms 12: learn: 0.6822290 total: 62.4ms remaining: 178ms 13: learn: 0.6814190 total: 67ms remaining: 172ms 14: learn: 0.6806480 total: 71.8ms remaining: 168ms 15: learn: 0.6799887 total: 76.7ms remaining: 163ms 16: learn: 0.6793341 total: 81.5ms remaining: 158ms 17: learn: 0.6786181 total: 86.3ms remaining: 153ms 18: learn: 0.6779427 total: 91.2ms remaining: 149ms 19: learn: 0.6772188 total: 95.8ms remaining: 144ms 20: learn: 0.6765775 total: 100ms remaining: 139ms 21: learn: 0.6759528 total: 105ms remaining: 133ms 22: learn: 0.6752654 total: 109ms remaining: 128ms 23: learn: 0.6746043 total: 113ms remaining: 123ms 24: learn: 0.6739620 total: 118ms remaining: 118ms 25: learn: 0.6733352 total: 122ms remaining: 112ms 26: learn: 0.6727376 total: 126ms remaining: 107ms 27: learn: 0.6721400 total: 130ms remaining: 102ms 28: learn: 0.6714965 total: 135ms remaining: 97.5ms 29: learn: 0.6709426 total: 139ms remaining: 92.5ms 30: learn: 0.6703744 total: 143ms remaining: 87.5ms 31: learn: 0.6698377 total: 147ms remaining: 82.7ms 32: learn: 0.6691988 total: 151ms remaining: 77.9ms 33: learn: 0.6686311 total: 155ms remaining: 73.1ms 34: learn: 0.6681412 total: 160ms remaining: 68.4ms 35: learn: 0.6675786 total: 164ms remaining: 63.7ms 36: learn: 0.6669812 total: 168ms remaining: 59ms 37: learn: 0.6664250 total: 172ms remaining: 54.3ms 38: learn: 0.6658997 total: 177ms remaining: 49.8ms 39: learn: 0.6654462 total: 181ms remaining: 45.1ms 40: learn: 0.6649852 total: 185ms remaining: 40.7ms 41: learn: 0.6644923 total: 190ms remaining: 36.2ms 42: learn: 0.6639526 total: 194ms remaining: 31.6ms 43: learn: 0.6633945 total: 199ms remaining: 27.1ms 44: learn: 0.6628482 total: 203ms remaining: 22.6ms 45: learn: 0.6624000 total: 207ms remaining: 18ms 46: learn: 0.6619583 total: 211ms remaining: 13.5ms 47: learn: 0.6614859 total: 216ms remaining: 8.98ms 48: learn: 0.6610002 total: 220ms remaining: 4.48ms 49: learn: 0.6605205 total: 224ms remaining: 0us 0: learn: 0.6922217 total: 5.01ms remaining: 246ms 1: learn: 0.6913853 total: 10.2ms remaining: 246ms 2: learn: 0.6905140 total: 15.1ms remaining: 237ms 3: learn: 0.6897418 total: 20.2ms remaining: 232ms 4: learn: 0.6887937 total: 24.8ms remaining: 223ms 5: learn: 0.6879609 total: 29.7ms remaining: 218ms 6: learn: 0.6871608 total: 34.4ms remaining: 211ms 7: learn: 0.6863368 total: 39ms remaining: 205ms 8: learn: 0.6855830 total: 43.7ms remaining: 199ms 9: learn: 0.6848045 total: 48.3ms remaining: 193ms 10: learn: 0.6840721 total: 53ms remaining: 188ms 11: learn: 0.6833443 total: 57.9ms remaining: 183ms 12: learn: 0.6826200 total: 62.5ms remaining: 178ms 13: learn: 0.6818612 total: 67.5ms remaining: 174ms 14: learn: 0.6811520 total: 72.4ms remaining: 169ms 15: learn: 0.6805039 total: 77.4ms remaining: 164ms 16: learn: 0.6798648 total: 82.3ms remaining: 160ms 17: learn: 0.6792198 total: 86.9ms remaining: 154ms 18: learn: 0.6785304 total: 91.8ms remaining: 150ms 19: learn: 0.6778163 total: 96.1ms remaining: 144ms 20: learn: 0.6771316 total: 100ms remaining: 139ms 21: learn: 0.6764934 total: 105ms remaining: 134ms 22: learn: 0.6758187 total: 109ms remaining: 128ms 23: learn: 0.6751007 total: 114ms remaining: 123ms 24: learn: 0.6744817 total: 118ms remaining: 118ms 25: learn: 0.6738427 total: 122ms remaining: 113ms 26: learn: 0.6732516 total: 126ms remaining: 108ms 27: learn: 0.6727017 total: 131ms remaining: 103ms 28: learn: 0.6720599 total: 135ms remaining: 97.8ms 29: learn: 0.6715157 total: 139ms remaining: 92.8ms 30: learn: 0.6709946 total: 144ms remaining: 88ms 31: learn: 0.6704673 total: 148ms remaining: 83.2ms 32: learn: 0.6697751 total: 152ms remaining: 78.4ms 33: learn: 0.6691874 total: 156ms remaining: 73.6ms 34: learn: 0.6686904 total: 161ms remaining: 68.9ms 35: learn: 0.6681100 total: 165ms remaining: 64.2ms 36: learn: 0.6675637 total: 169ms remaining: 59.5ms 37: learn: 0.6669932 total: 174ms remaining: 54.8ms 38: learn: 0.6664853 total: 178ms remaining: 50.1ms 39: learn: 0.6660221 total: 182ms remaining: 45.4ms 40: learn: 0.6655541 total: 187ms remaining: 40.9ms 41: learn: 0.6650437 total: 191ms remaining: 36.4ms 42: learn: 0.6645777 total: 195ms remaining: 31.8ms 43: learn: 0.6640358 total: 200ms remaining: 27.3ms 44: learn: 0.6635059 total: 205ms remaining: 22.7ms 45: learn: 0.6630847 total: 209ms remaining: 18.2ms 46: learn: 0.6626807 total: 213ms remaining: 13.6ms 47: learn: 0.6622358 total: 218ms remaining: 9.07ms 48: learn: 0.6618338 total: 222ms remaining: 4.53ms 49: learn: 0.6614009 total: 226ms remaining: 0us 0: learn: 0.6930536 total: 4.77ms remaining: 42.9ms 1: learn: 0.6929686 total: 9.91ms remaining: 39.6ms 2: learn: 0.6928794 total: 14.6ms remaining: 34.1ms 3: learn: 0.6927899 total: 19.2ms remaining: 28.8ms 4: learn: 0.6927015 total: 24ms remaining: 24ms 5: learn: 0.6926137 total: 28.6ms remaining: 19.1ms 6: learn: 0.6925165 total: 33.3ms remaining: 14.3ms 7: learn: 0.6924239 total: 38.2ms remaining: 9.54ms 8: learn: 0.6923324 total: 43.4ms remaining: 4.82ms 9: learn: 0.6922460 total: 48.1ms remaining: 0us 0: learn: 0.6930481 total: 5.22ms remaining: 47ms 1: learn: 0.6929514 total: 9.95ms remaining: 39.8ms 2: learn: 0.6928649 total: 14.8ms remaining: 34.4ms 3: learn: 0.6927763 total: 19.4ms remaining: 29.1ms 4: learn: 0.6926710 total: 24ms remaining: 24ms 5: learn: 0.6925809 total: 28.9ms remaining: 19.2ms 6: learn: 0.6924859 total: 33.9ms remaining: 14.5ms 7: learn: 0.6923900 total: 38.9ms remaining: 9.73ms 8: learn: 0.6922999 total: 43.7ms remaining: 4.86ms 9: learn: 0.6922148 total: 48.2ms remaining: 0us 0: learn: 0.6930497 total: 5.05ms remaining: 45.5ms 1: learn: 0.6929506 total: 10.3ms remaining: 41.2ms 2: learn: 0.6928640 total: 15.1ms remaining: 35.3ms 3: learn: 0.6927760 total: 20ms remaining: 30ms 4: learn: 0.6926763 total: 24.8ms remaining: 24.8ms 5: learn: 0.6925839 total: 29.8ms remaining: 19.9ms 6: learn: 0.6924857 total: 34.6ms remaining: 14.8ms 7: learn: 0.6923920 total: 39.4ms remaining: 9.85ms 8: learn: 0.6923024 total: 44.1ms remaining: 4.9ms 9: learn: 0.6922073 total: 48.7ms remaining: 0us 0: learn: 0.6930532 total: 4.86ms remaining: 43.7ms 1: learn: 0.6929639 total: 9.85ms remaining: 39.4ms 2: learn: 0.6928756 total: 14.7ms remaining: 34.4ms 3: learn: 0.6927820 total: 19.5ms remaining: 29.2ms 4: learn: 0.6926797 total: 24.1ms remaining: 24.1ms 5: learn: 0.6925863 total: 28.9ms remaining: 19.3ms 6: learn: 0.6924953 total: 33.7ms remaining: 14.4ms 7: learn: 0.6923980 total: 38.6ms remaining: 9.66ms 8: learn: 0.6923078 total: 43.4ms remaining: 4.82ms 9: learn: 0.6922221 total: 48ms remaining: 0us 0: learn: 0.6930443 total: 4.95ms remaining: 44.5ms 1: learn: 0.6929517 total: 10.2ms remaining: 40.7ms 2: learn: 0.6928611 total: 15.2ms remaining: 35.6ms 3: learn: 0.6927705 total: 20.2ms remaining: 30.3ms 4: learn: 0.6926741 total: 25.1ms remaining: 25.1ms 5: learn: 0.6925844 total: 29.9ms remaining: 20ms 6: learn: 0.6924939 total: 34.9ms remaining: 15ms 7: learn: 0.6923966 total: 39.8ms remaining: 9.95ms 8: learn: 0.6923099 total: 44.4ms remaining: 4.94ms 9: learn: 0.6922241 total: 49.1ms remaining: 0us 0: learn: 0.6930547 total: 4.98ms remaining: 44.8ms 1: learn: 0.6929511 total: 10ms remaining: 40.1ms 2: learn: 0.6928648 total: 14.4ms remaining: 33.6ms 3: learn: 0.6927714 total: 19.2ms remaining: 28.8ms 4: learn: 0.6926688 total: 23.8ms remaining: 23.8ms 5: learn: 0.6925755 total: 29.1ms remaining: 19.4ms 6: learn: 0.6924858 total: 34.1ms remaining: 14.6ms 7: learn: 0.6923920 total: 38.8ms remaining: 9.71ms 8: learn: 0.6923009 total: 43.3ms remaining: 4.81ms 9: learn: 0.6922124 total: 48.1ms remaining: 0us 0: learn: 0.6930443 total: 4.79ms remaining: 43.1ms 1: learn: 0.6929525 total: 9.81ms remaining: 39.2ms 2: learn: 0.6928646 total: 14.7ms remaining: 34.2ms 3: learn: 0.6927832 total: 19.3ms remaining: 29ms 4: learn: 0.6926894 total: 24.1ms remaining: 24.1ms 5: learn: 0.6925963 total: 28.6ms remaining: 19.1ms 6: learn: 0.6925016 total: 33.5ms remaining: 14.3ms 7: learn: 0.6924088 total: 38.3ms remaining: 9.56ms 8: learn: 0.6923209 total: 43ms remaining: 4.77ms 9: learn: 0.6922313 total: 48.1ms remaining: 0us 0: learn: 0.6930467 total: 5.1ms remaining: 45.9ms 1: learn: 0.6929545 total: 10.1ms remaining: 40.5ms 2: learn: 0.6928628 total: 15ms remaining: 35ms 3: learn: 0.6927671 total: 20.2ms remaining: 30.3ms 4: learn: 0.6926661 total: 24.7ms remaining: 24.7ms 5: learn: 0.6925708 total: 29.4ms remaining: 19.6ms 6: learn: 0.6924787 total: 34.2ms remaining: 14.6ms 7: learn: 0.6923846 total: 38.9ms remaining: 9.73ms 8: learn: 0.6922955 total: 43.7ms remaining: 4.86ms 9: learn: 0.6922050 total: 48.5ms remaining: 0us 0: learn: 0.6930468 total: 5ms remaining: 45ms 1: learn: 0.6929566 total: 10.2ms remaining: 40.6ms 2: learn: 0.6928693 total: 15.3ms remaining: 35.7ms 3: learn: 0.6927756 total: 20.3ms remaining: 30.5ms 4: learn: 0.6926774 total: 25.1ms remaining: 25.1ms 5: learn: 0.6925827 total: 29.7ms remaining: 19.8ms 6: learn: 0.6924925 total: 34.3ms remaining: 14.7ms 7: learn: 0.6923999 total: 39.2ms remaining: 9.81ms 8: learn: 0.6923097 total: 43.7ms remaining: 4.85ms 9: learn: 0.6922205 total: 48.2ms remaining: 0us 0: learn: 0.6930531 total: 4.8ms remaining: 43.2ms 1: learn: 0.6929694 total: 9.63ms remaining: 38.5ms 2: learn: 0.6928776 total: 14.5ms remaining: 33.9ms 3: learn: 0.6927957 total: 19.3ms remaining: 29ms 4: learn: 0.6927006 total: 24.3ms remaining: 24.3ms 5: learn: 0.6926130 total: 29.2ms remaining: 19.5ms 6: learn: 0.6925219 total: 34.1ms remaining: 14.6ms 7: learn: 0.6924288 total: 39ms remaining: 9.75ms 8: learn: 0.6923451 total: 43.9ms remaining: 4.87ms 9: learn: 0.6922595 total: 48.5ms remaining: 0us 0: learn: 0.6930536 total: 5.08ms remaining: 122ms 1: learn: 0.6929686 total: 10.1ms remaining: 116ms 2: learn: 0.6928794 total: 15ms remaining: 110ms 3: learn: 0.6927899 total: 19.8ms remaining: 104ms 4: learn: 0.6927015 total: 24.6ms remaining: 98.4ms 5: learn: 0.6926137 total: 29.3ms remaining: 92.9ms 6: learn: 0.6925165 total: 33.9ms remaining: 87.2ms 7: learn: 0.6924239 total: 38.7ms remaining: 82.2ms 8: learn: 0.6923324 total: 43.3ms remaining: 77.1ms 9: learn: 0.6922460 total: 48.1ms remaining: 72.1ms 10: learn: 0.6921522 total: 53ms remaining: 67.4ms 11: learn: 0.6920595 total: 57.6ms remaining: 62.4ms 12: learn: 0.6919660 total: 62.3ms remaining: 57.5ms 13: learn: 0.6918848 total: 67.1ms remaining: 52.7ms 14: learn: 0.6917922 total: 71.7ms remaining: 47.8ms 15: learn: 0.6917085 total: 76.4ms remaining: 43ms 16: learn: 0.6916210 total: 81.3ms remaining: 38.3ms 17: learn: 0.6915222 total: 86ms remaining: 33.4ms 18: learn: 0.6914281 total: 90.6ms remaining: 28.6ms 19: learn: 0.6913364 total: 95.5ms remaining: 23.9ms 20: learn: 0.6912530 total: 100ms remaining: 19ms 21: learn: 0.6911645 total: 105ms remaining: 14.3ms 22: learn: 0.6910754 total: 110ms remaining: 9.52ms 23: learn: 0.6909912 total: 114ms remaining: 4.75ms 24: learn: 0.6908994 total: 119ms remaining: 0us 0: learn: 0.6930481 total: 5.04ms remaining: 121ms 1: learn: 0.6929514 total: 10.1ms remaining: 116ms 2: learn: 0.6928649 total: 14.9ms remaining: 109ms 3: learn: 0.6927763 total: 19.5ms remaining: 102ms 4: learn: 0.6926710 total: 24ms remaining: 96.1ms 5: learn: 0.6925809 total: 28.7ms remaining: 90.9ms 6: learn: 0.6924859 total: 33.5ms remaining: 86.2ms 7: learn: 0.6923900 total: 38.3ms remaining: 81.3ms 8: learn: 0.6922999 total: 43ms remaining: 76.5ms 9: learn: 0.6922148 total: 47.9ms remaining: 71.8ms 10: learn: 0.6921234 total: 52.9ms remaining: 67.3ms 11: learn: 0.6920306 total: 57.5ms remaining: 62.3ms 12: learn: 0.6919441 total: 62.4ms remaining: 57.6ms 13: learn: 0.6918634 total: 67.2ms remaining: 52.8ms 14: learn: 0.6917694 total: 71.9ms remaining: 47.9ms 15: learn: 0.6916868 total: 76.8ms remaining: 43.2ms 16: learn: 0.6916022 total: 81.5ms remaining: 38.4ms 17: learn: 0.6915058 total: 86.8ms remaining: 33.7ms 18: learn: 0.6914119 total: 91.4ms remaining: 28.9ms 19: learn: 0.6913209 total: 96.2ms remaining: 24ms 20: learn: 0.6912356 total: 101ms remaining: 19.2ms 21: learn: 0.6911396 total: 105ms remaining: 14.4ms 22: learn: 0.6910518 total: 110ms remaining: 9.56ms 23: learn: 0.6909684 total: 114ms remaining: 4.76ms 24: learn: 0.6908796 total: 119ms remaining: 0us 0: learn: 0.6930497 total: 5.02ms remaining: 121ms 1: learn: 0.6929506 total: 10.2ms remaining: 118ms 2: learn: 0.6928640 total: 15ms remaining: 110ms 3: learn: 0.6927760 total: 19.6ms remaining: 103ms 4: learn: 0.6926763 total: 24.3ms remaining: 97.3ms 5: learn: 0.6925839 total: 29ms remaining: 91.9ms 6: learn: 0.6924857 total: 33.7ms remaining: 86.7ms 7: learn: 0.6923920 total: 38.5ms remaining: 81.9ms 8: learn: 0.6923024 total: 43.2ms remaining: 76.9ms 9: learn: 0.6922073 total: 48.1ms remaining: 72.1ms 10: learn: 0.6921160 total: 52.7ms remaining: 67.1ms 11: learn: 0.6920267 total: 57.3ms remaining: 62.1ms 12: learn: 0.6919391 total: 62.1ms remaining: 57.3ms 13: learn: 0.6918537 total: 66.8ms remaining: 52.5ms 14: learn: 0.6917597 total: 71.6ms remaining: 47.8ms 15: learn: 0.6916782 total: 76.5ms remaining: 43ms 16: learn: 0.6915968 total: 81.5ms remaining: 38.3ms 17: learn: 0.6914960 total: 86.1ms remaining: 33.5ms 18: learn: 0.6914024 total: 90.9ms remaining: 28.7ms 19: learn: 0.6913069 total: 95.6ms remaining: 23.9ms 20: learn: 0.6912185 total: 100ms remaining: 19.1ms 21: learn: 0.6911234 total: 105ms remaining: 14.3ms 22: learn: 0.6910339 total: 109ms remaining: 9.47ms 23: learn: 0.6909527 total: 113ms remaining: 4.72ms 24: learn: 0.6908660 total: 118ms remaining: 0us 0: learn: 0.6930532 total: 5.01ms remaining: 120ms 1: learn: 0.6929639 total: 10.2ms remaining: 117ms 2: learn: 0.6928756 total: 15.2ms remaining: 111ms 3: learn: 0.6927820 total: 19.9ms remaining: 104ms 4: learn: 0.6926797 total: 24.5ms remaining: 97.9ms 5: learn: 0.6925863 total: 29.3ms remaining: 92.9ms 6: learn: 0.6924953 total: 34ms remaining: 87.5ms 7: learn: 0.6923980 total: 38.8ms remaining: 82.3ms 8: learn: 0.6923078 total: 43.3ms remaining: 77ms 9: learn: 0.6922221 total: 48ms remaining: 71.9ms 10: learn: 0.6921274 total: 52.6ms remaining: 67ms 11: learn: 0.6920382 total: 57.2ms remaining: 62ms 12: learn: 0.6919462 total: 62.1ms remaining: 57.3ms 13: learn: 0.6918508 total: 67ms remaining: 52.6ms 14: learn: 0.6917585 total: 71.8ms remaining: 47.9ms 15: learn: 0.6916745 total: 76.7ms remaining: 43.2ms 16: learn: 0.6915922 total: 81.4ms remaining: 38.3ms 17: learn: 0.6915026 total: 86ms remaining: 33.5ms 18: learn: 0.6914087 total: 90.9ms remaining: 28.7ms 19: learn: 0.6913160 total: 95.4ms remaining: 23.9ms 20: learn: 0.6912241 total: 100ms remaining: 19.1ms 21: learn: 0.6911263 total: 105ms remaining: 14.3ms 22: learn: 0.6910371 total: 109ms remaining: 9.52ms 23: learn: 0.6909400 total: 114ms remaining: 4.75ms 24: learn: 0.6908527 total: 118ms remaining: 0us 0: learn: 0.6930443 total: 5.12ms remaining: 123ms 1: learn: 0.6929517 total: 10.3ms remaining: 118ms 2: learn: 0.6928611 total: 15.2ms remaining: 112ms 3: learn: 0.6927705 total: 20ms remaining: 105ms 4: learn: 0.6926741 total: 24.7ms remaining: 98.8ms 5: learn: 0.6925844 total: 29.5ms remaining: 93.4ms 6: learn: 0.6924939 total: 34.2ms remaining: 87.9ms 7: learn: 0.6923966 total: 39ms remaining: 82.8ms 8: learn: 0.6923099 total: 43.8ms remaining: 77.8ms 9: learn: 0.6922241 total: 48.3ms remaining: 72.5ms 10: learn: 0.6921377 total: 53.1ms remaining: 67.6ms 11: learn: 0.6920474 total: 57.6ms remaining: 62.4ms 12: learn: 0.6919601 total: 62.3ms remaining: 57.5ms 13: learn: 0.6918781 total: 67.2ms remaining: 52.8ms 14: learn: 0.6917840 total: 72.1ms remaining: 48.1ms 15: learn: 0.6917008 total: 76.8ms remaining: 43.2ms 16: learn: 0.6916175 total: 81.3ms remaining: 38.3ms 17: learn: 0.6915288 total: 86ms remaining: 33.4ms 18: learn: 0.6914401 total: 90.6ms remaining: 28.6ms 19: learn: 0.6913508 total: 95.5ms remaining: 23.9ms 20: learn: 0.6912669 total: 99.9ms remaining: 19ms 21: learn: 0.6911794 total: 104ms remaining: 14.2ms 22: learn: 0.6910865 total: 109ms remaining: 9.46ms 23: learn: 0.6910039 total: 113ms remaining: 4.7ms 24: learn: 0.6909129 total: 117ms remaining: 0us 0: learn: 0.6930547 total: 4.85ms remaining: 116ms 1: learn: 0.6929511 total: 10ms remaining: 116ms 2: learn: 0.6928648 total: 15ms remaining: 110ms 3: learn: 0.6927714 total: 19.7ms remaining: 103ms 4: learn: 0.6926688 total: 24.3ms remaining: 97.3ms 5: learn: 0.6925755 total: 29.1ms remaining: 92ms 6: learn: 0.6924858 total: 33.7ms remaining: 86.6ms 7: learn: 0.6923920 total: 38.4ms remaining: 81.7ms 8: learn: 0.6923009 total: 42.9ms remaining: 76.3ms 9: learn: 0.6922124 total: 47.5ms remaining: 71.3ms 10: learn: 0.6921237 total: 52.2ms remaining: 66.4ms 11: learn: 0.6920327 total: 56.7ms remaining: 61.4ms 12: learn: 0.6919424 total: 61.6ms remaining: 56.9ms 13: learn: 0.6918480 total: 66.3ms remaining: 52.1ms 14: learn: 0.6917546 total: 71ms remaining: 47.4ms 15: learn: 0.6916703 total: 75.9ms remaining: 42.7ms 16: learn: 0.6915859 total: 80.7ms remaining: 38ms 17: learn: 0.6914846 total: 85.6ms remaining: 33.3ms 18: learn: 0.6913889 total: 90.7ms remaining: 28.6ms 19: learn: 0.6912941 total: 95.1ms remaining: 23.8ms 20: learn: 0.6912021 total: 99.3ms remaining: 18.9ms 21: learn: 0.6911061 total: 104ms remaining: 14.1ms 22: learn: 0.6910148 total: 108ms remaining: 9.37ms 23: learn: 0.6909300 total: 112ms remaining: 4.66ms 24: learn: 0.6908405 total: 116ms remaining: 0us 0: learn: 0.6930443 total: 4.94ms remaining: 119ms 1: learn: 0.6929525 total: 9.92ms remaining: 114ms 2: learn: 0.6928646 total: 14.8ms remaining: 108ms 3: learn: 0.6927832 total: 19.4ms remaining: 102ms 4: learn: 0.6926894 total: 24ms remaining: 96.1ms 5: learn: 0.6925963 total: 28.9ms remaining: 91.6ms 6: learn: 0.6925016 total: 33.6ms remaining: 86.5ms 7: learn: 0.6924088 total: 38.5ms remaining: 81.7ms 8: learn: 0.6923209 total: 43.2ms remaining: 76.8ms 9: learn: 0.6922313 total: 48.1ms remaining: 72.1ms 10: learn: 0.6921447 total: 52.8ms remaining: 67.2ms 11: learn: 0.6920557 total: 57.3ms remaining: 62.1ms 12: learn: 0.6919649 total: 62.2ms remaining: 57.4ms 13: learn: 0.6918775 total: 67.1ms remaining: 52.7ms 14: learn: 0.6917820 total: 71.8ms remaining: 47.8ms 15: learn: 0.6916973 total: 76.5ms remaining: 43ms 16: learn: 0.6916133 total: 81ms remaining: 38.1ms 17: learn: 0.6915232 total: 85.9ms remaining: 33.4ms 18: learn: 0.6914290 total: 90.7ms remaining: 28.6ms 19: learn: 0.6913346 total: 95.3ms remaining: 23.8ms 20: learn: 0.6912512 total: 99.9ms remaining: 19ms 21: learn: 0.6911580 total: 105ms remaining: 14.3ms 22: learn: 0.6910653 total: 109ms remaining: 9.47ms 23: learn: 0.6909720 total: 113ms remaining: 4.72ms 24: learn: 0.6908842 total: 117ms remaining: 0us 0: learn: 0.6930467 total: 5.03ms remaining: 121ms 1: learn: 0.6929545 total: 10.1ms remaining: 117ms 2: learn: 0.6928628 total: 14.9ms remaining: 109ms 3: learn: 0.6927671 total: 19.5ms remaining: 102ms 4: learn: 0.6926661 total: 24.3ms remaining: 97ms 5: learn: 0.6925708 total: 29.2ms remaining: 92.5ms 6: learn: 0.6924787 total: 33.9ms remaining: 87ms 7: learn: 0.6923846 total: 38.7ms remaining: 82.3ms 8: learn: 0.6922955 total: 43.6ms remaining: 77.6ms 9: learn: 0.6922050 total: 48.4ms remaining: 72.6ms 10: learn: 0.6921060 total: 53.2ms remaining: 67.8ms 11: learn: 0.6920166 total: 57.9ms remaining: 62.8ms 12: learn: 0.6919264 total: 62.6ms remaining: 57.8ms 13: learn: 0.6918286 total: 67.3ms remaining: 52.9ms 14: learn: 0.6917336 total: 72ms remaining: 48ms 15: learn: 0.6916508 total: 76.7ms remaining: 43.1ms 16: learn: 0.6915645 total: 81.5ms remaining: 38.3ms 17: learn: 0.6914664 total: 86.2ms remaining: 33.5ms 18: learn: 0.6913711 total: 90.8ms remaining: 28.7ms 19: learn: 0.6912756 total: 95.2ms remaining: 23.8ms 20: learn: 0.6911914 total: 99.4ms remaining: 18.9ms 21: learn: 0.6910939 total: 104ms remaining: 14.2ms 22: learn: 0.6910013 total: 108ms remaining: 9.4ms 23: learn: 0.6909191 total: 113ms remaining: 4.69ms 24: learn: 0.6908253 total: 117ms remaining: 0us 0: learn: 0.6930468 total: 4.84ms remaining: 116ms 1: learn: 0.6929566 total: 9.78ms remaining: 112ms 2: learn: 0.6928693 total: 14.7ms remaining: 108ms 3: learn: 0.6927756 total: 19.5ms remaining: 102ms 4: learn: 0.6926774 total: 24.2ms remaining: 96.8ms 5: learn: 0.6925827 total: 28.8ms remaining: 91.3ms 6: learn: 0.6924925 total: 33.5ms remaining: 86.2ms 7: learn: 0.6923999 total: 38.3ms remaining: 81.3ms 8: learn: 0.6923097 total: 42.8ms remaining: 76.1ms 9: learn: 0.6922205 total: 47.4ms remaining: 71.2ms 10: learn: 0.6921302 total: 52.2ms remaining: 66.4ms 11: learn: 0.6920439 total: 56.8ms remaining: 61.6ms 12: learn: 0.6919558 total: 62ms remaining: 57.2ms 13: learn: 0.6918583 total: 66.8ms remaining: 52.5ms 14: learn: 0.6917702 total: 71.4ms remaining: 47.6ms 15: learn: 0.6916895 total: 76.7ms remaining: 43.1ms 16: learn: 0.6916063 total: 81.3ms remaining: 38.2ms 17: learn: 0.6915143 total: 85.8ms remaining: 33.4ms 18: learn: 0.6914255 total: 90.2ms remaining: 28.5ms 19: learn: 0.6913322 total: 94.5ms remaining: 23.6ms 20: learn: 0.6912455 total: 98.5ms remaining: 18.8ms 21: learn: 0.6911560 total: 103ms remaining: 14ms 22: learn: 0.6910628 total: 107ms remaining: 9.3ms 23: learn: 0.6909714 total: 111ms remaining: 4.63ms 24: learn: 0.6908861 total: 115ms remaining: 0us 0: learn: 0.6930531 total: 5.12ms remaining: 123ms 1: learn: 0.6929694 total: 10.4ms remaining: 120ms 2: learn: 0.6928776 total: 15.3ms remaining: 112ms 3: learn: 0.6927957 total: 19.9ms remaining: 104ms 4: learn: 0.6927006 total: 24.6ms remaining: 98.5ms 5: learn: 0.6926130 total: 29.5ms remaining: 93.3ms 6: learn: 0.6925219 total: 34.3ms remaining: 88.1ms 7: learn: 0.6924288 total: 38.9ms remaining: 82.6ms 8: learn: 0.6923451 total: 43.5ms remaining: 77.3ms 9: learn: 0.6922595 total: 48ms remaining: 71.9ms 10: learn: 0.6921746 total: 52.7ms remaining: 67ms 11: learn: 0.6920875 total: 57.4ms remaining: 62.1ms 12: learn: 0.6920013 total: 62ms remaining: 57.2ms 13: learn: 0.6919094 total: 66.7ms remaining: 52.4ms 14: learn: 0.6918202 total: 71.3ms remaining: 47.5ms 15: learn: 0.6917415 total: 76.2ms remaining: 42.9ms 16: learn: 0.6916506 total: 81.1ms remaining: 38.2ms 17: learn: 0.6915620 total: 85.9ms remaining: 33.4ms 18: learn: 0.6914727 total: 90.4ms remaining: 28.6ms 19: learn: 0.6913805 total: 94.9ms remaining: 23.7ms 20: learn: 0.6912945 total: 99.2ms remaining: 18.9ms 21: learn: 0.6912087 total: 104ms remaining: 14.1ms 22: learn: 0.6911250 total: 108ms remaining: 9.38ms 23: learn: 0.6910457 total: 112ms remaining: 4.67ms 24: learn: 0.6909565 total: 116ms remaining: 0us 0: learn: 0.6930536 total: 4.82ms remaining: 236ms 1: learn: 0.6929686 total: 9.89ms remaining: 237ms 2: learn: 0.6928794 total: 14.9ms remaining: 233ms 3: learn: 0.6927899 total: 19.9ms remaining: 229ms 4: learn: 0.6927015 total: 24.7ms remaining: 222ms 5: learn: 0.6926137 total: 29.2ms remaining: 214ms 6: learn: 0.6925165 total: 34ms remaining: 209ms 7: learn: 0.6924239 total: 38.7ms remaining: 203ms 8: learn: 0.6923324 total: 43.2ms remaining: 197ms 9: learn: 0.6922460 total: 47.8ms remaining: 191ms 10: learn: 0.6921522 total: 52.4ms remaining: 186ms 11: learn: 0.6920595 total: 57ms remaining: 181ms 12: learn: 0.6919660 total: 61.8ms remaining: 176ms 13: learn: 0.6918848 total: 66.5ms remaining: 171ms 14: learn: 0.6917922 total: 71ms remaining: 166ms 15: learn: 0.6917085 total: 75.8ms remaining: 161ms 16: learn: 0.6916210 total: 80.4ms remaining: 156ms 17: learn: 0.6915222 total: 85.4ms remaining: 152ms 18: learn: 0.6914281 total: 90.1ms remaining: 147ms 19: learn: 0.6913364 total: 94.7ms remaining: 142ms 20: learn: 0.6912530 total: 99ms remaining: 137ms 21: learn: 0.6911645 total: 103ms remaining: 132ms 22: learn: 0.6910754 total: 108ms remaining: 126ms 23: learn: 0.6909912 total: 112ms remaining: 121ms 24: learn: 0.6908994 total: 116ms remaining: 116ms 25: learn: 0.6908134 total: 120ms remaining: 111ms 26: learn: 0.6907257 total: 125ms remaining: 106ms 27: learn: 0.6906331 total: 129ms remaining: 101ms 28: learn: 0.6905456 total: 133ms remaining: 96.1ms 29: learn: 0.6904585 total: 137ms remaining: 91.2ms 30: learn: 0.6903791 total: 141ms remaining: 86.5ms 31: learn: 0.6902987 total: 145ms remaining: 81.8ms 32: learn: 0.6902084 total: 150ms remaining: 77.1ms 33: learn: 0.6901204 total: 154ms remaining: 72.5ms 34: learn: 0.6900366 total: 158ms remaining: 67.9ms 35: learn: 0.6899472 total: 163ms remaining: 63.3ms 36: learn: 0.6898592 total: 167ms remaining: 58.6ms 37: learn: 0.6897734 total: 172ms remaining: 54.2ms 38: learn: 0.6896868 total: 177ms remaining: 49.8ms 39: learn: 0.6896041 total: 181ms remaining: 45.4ms 40: learn: 0.6895210 total: 186ms remaining: 40.8ms 41: learn: 0.6894351 total: 191ms remaining: 36.3ms 42: learn: 0.6893458 total: 195ms remaining: 31.8ms 43: learn: 0.6892457 total: 199ms remaining: 27.2ms 44: learn: 0.6891555 total: 204ms remaining: 22.6ms 45: learn: 0.6890720 total: 208ms remaining: 18.1ms 46: learn: 0.6889866 total: 212ms remaining: 13.6ms 47: learn: 0.6888959 total: 217ms remaining: 9.03ms 48: learn: 0.6888129 total: 221ms remaining: 4.5ms 49: learn: 0.6887341 total: 225ms remaining: 0us 0: learn: 0.6930481 total: 4.85ms remaining: 238ms 1: learn: 0.6929514 total: 9.74ms remaining: 234ms 2: learn: 0.6928649 total: 14.3ms remaining: 225ms 3: learn: 0.6927763 total: 19.1ms remaining: 220ms 4: learn: 0.6926710 total: 24ms remaining: 216ms 5: learn: 0.6925809 total: 28.9ms remaining: 212ms 6: learn: 0.6924859 total: 34ms remaining: 209ms 7: learn: 0.6923900 total: 38.8ms remaining: 204ms 8: learn: 0.6922999 total: 43.7ms remaining: 199ms 9: learn: 0.6922148 total: 48.3ms remaining: 193ms 10: learn: 0.6921234 total: 53.3ms remaining: 189ms 11: learn: 0.6920306 total: 57.7ms remaining: 183ms 12: learn: 0.6919441 total: 62.3ms remaining: 177ms 13: learn: 0.6918634 total: 67ms remaining: 172ms 14: learn: 0.6917694 total: 72.1ms remaining: 168ms 15: learn: 0.6916868 total: 76.7ms remaining: 163ms 16: learn: 0.6916022 total: 81.3ms remaining: 158ms 17: learn: 0.6915058 total: 85.7ms remaining: 152ms 18: learn: 0.6914119 total: 90.2ms remaining: 147ms 19: learn: 0.6913209 total: 94.8ms remaining: 142ms 20: learn: 0.6912356 total: 99.7ms remaining: 138ms 21: learn: 0.6911396 total: 104ms remaining: 133ms 22: learn: 0.6910518 total: 109ms remaining: 128ms 23: learn: 0.6909684 total: 113ms remaining: 123ms 24: learn: 0.6908796 total: 118ms remaining: 118ms 25: learn: 0.6907949 total: 122ms remaining: 113ms 26: learn: 0.6907067 total: 126ms remaining: 108ms 27: learn: 0.6906175 total: 131ms remaining: 103ms 28: learn: 0.6905288 total: 135ms remaining: 97.7ms 29: learn: 0.6904401 total: 139ms remaining: 92.7ms 30: learn: 0.6903557 total: 143ms remaining: 87.7ms 31: learn: 0.6902685 total: 147ms remaining: 82.8ms 32: learn: 0.6901764 total: 151ms remaining: 78ms 33: learn: 0.6900826 total: 156ms remaining: 73.3ms 34: learn: 0.6899997 total: 160ms remaining: 68.7ms 35: learn: 0.6899071 total: 164ms remaining: 64ms 36: learn: 0.6898155 total: 169ms remaining: 59.3ms 37: learn: 0.6897282 total: 173ms remaining: 54.7ms 38: learn: 0.6896415 total: 177ms remaining: 50ms 39: learn: 0.6895548 total: 182ms remaining: 45.5ms 40: learn: 0.6894717 total: 187ms remaining: 41ms 41: learn: 0.6893849 total: 191ms remaining: 36.4ms 42: learn: 0.6892935 total: 196ms remaining: 31.9ms 43: learn: 0.6891950 total: 200ms remaining: 27.3ms 44: learn: 0.6891072 total: 204ms remaining: 22.7ms 45: learn: 0.6890243 total: 209ms remaining: 18.1ms 46: learn: 0.6889413 total: 213ms remaining: 13.6ms 47: learn: 0.6888521 total: 217ms remaining: 9.04ms 48: learn: 0.6887696 total: 221ms remaining: 4.51ms 49: learn: 0.6886930 total: 226ms remaining: 0us 0: learn: 0.6930497 total: 5.13ms remaining: 251ms 1: learn: 0.6929506 total: 10.2ms remaining: 245ms 2: learn: 0.6928640 total: 15.1ms remaining: 237ms 3: learn: 0.6927760 total: 20ms remaining: 230ms 4: learn: 0.6926763 total: 24.6ms remaining: 221ms 5: learn: 0.6925839 total: 29.4ms remaining: 215ms 6: learn: 0.6924857 total: 34ms remaining: 209ms 7: learn: 0.6923920 total: 38.9ms remaining: 204ms 8: learn: 0.6923024 total: 43.6ms remaining: 199ms 9: learn: 0.6922073 total: 48.4ms remaining: 194ms 10: learn: 0.6921160 total: 53.1ms remaining: 188ms 11: learn: 0.6920267 total: 57.8ms remaining: 183ms 12: learn: 0.6919391 total: 62.8ms remaining: 179ms 13: learn: 0.6918537 total: 67.6ms remaining: 174ms 14: learn: 0.6917597 total: 72.5ms remaining: 169ms 15: learn: 0.6916782 total: 77.3ms remaining: 164ms 16: learn: 0.6915968 total: 82.1ms remaining: 159ms 17: learn: 0.6914960 total: 86.8ms remaining: 154ms 18: learn: 0.6914024 total: 91.4ms remaining: 149ms 19: learn: 0.6913069 total: 96.3ms remaining: 144ms 20: learn: 0.6912185 total: 101ms remaining: 139ms 21: learn: 0.6911234 total: 105ms remaining: 134ms 22: learn: 0.6910339 total: 109ms remaining: 128ms 23: learn: 0.6909527 total: 114ms remaining: 123ms 24: learn: 0.6908660 total: 118ms remaining: 118ms 25: learn: 0.6907806 total: 122ms remaining: 113ms 26: learn: 0.6906929 total: 126ms remaining: 108ms 27: learn: 0.6906015 total: 131ms remaining: 103ms 28: learn: 0.6905094 total: 135ms remaining: 97.7ms 29: learn: 0.6904233 total: 139ms remaining: 92.8ms 30: learn: 0.6903418 total: 143ms remaining: 87.9ms 31: learn: 0.6902536 total: 148ms remaining: 83ms 32: learn: 0.6901556 total: 152ms remaining: 78.2ms 33: learn: 0.6900649 total: 156ms remaining: 73.5ms 34: learn: 0.6899825 total: 160ms remaining: 68.7ms 35: learn: 0.6898871 total: 165ms remaining: 64ms 36: learn: 0.6897999 total: 169ms remaining: 59.3ms 37: learn: 0.6897134 total: 173ms remaining: 54.6ms 38: learn: 0.6896360 total: 177ms remaining: 50ms 39: learn: 0.6895543 total: 182ms remaining: 45.5ms 40: learn: 0.6894704 total: 186ms remaining: 40.9ms 41: learn: 0.6893835 total: 191ms remaining: 36.4ms 42: learn: 0.6892919 total: 196ms remaining: 31.9ms 43: learn: 0.6891946 total: 200ms remaining: 27.3ms 44: learn: 0.6891060 total: 204ms remaining: 22.7ms 45: learn: 0.6890184 total: 209ms remaining: 18.1ms 46: learn: 0.6889348 total: 213ms remaining: 13.6ms 47: learn: 0.6888459 total: 217ms remaining: 9.04ms 48: learn: 0.6887641 total: 221ms remaining: 4.52ms 49: learn: 0.6886880 total: 225ms remaining: 0us 0: learn: 0.6930532 total: 4.97ms remaining: 243ms 1: learn: 0.6929639 total: 10ms remaining: 240ms 2: learn: 0.6928756 total: 14.7ms remaining: 230ms 3: learn: 0.6927820 total: 19.4ms remaining: 223ms 4: learn: 0.6926797 total: 24.1ms remaining: 217ms 5: learn: 0.6925863 total: 29ms remaining: 213ms 6: learn: 0.6924953 total: 33.9ms remaining: 208ms 7: learn: 0.6923980 total: 38.7ms remaining: 203ms 8: learn: 0.6923078 total: 43.4ms remaining: 198ms 9: learn: 0.6922221 total: 48ms remaining: 192ms 10: learn: 0.6921274 total: 52.6ms remaining: 187ms 11: learn: 0.6920382 total: 57.3ms remaining: 182ms 12: learn: 0.6919462 total: 62.2ms remaining: 177ms 13: learn: 0.6918508 total: 66.8ms remaining: 172ms 14: learn: 0.6917585 total: 71.5ms remaining: 167ms 15: learn: 0.6916745 total: 76.1ms remaining: 162ms 16: learn: 0.6915922 total: 80.9ms remaining: 157ms 17: learn: 0.6915026 total: 85.4ms remaining: 152ms 18: learn: 0.6914087 total: 90.1ms remaining: 147ms 19: learn: 0.6913160 total: 94.7ms remaining: 142ms 20: learn: 0.6912241 total: 99.5ms remaining: 137ms 21: learn: 0.6911263 total: 104ms remaining: 133ms 22: learn: 0.6910371 total: 109ms remaining: 128ms 23: learn: 0.6909400 total: 113ms remaining: 123ms 24: learn: 0.6908527 total: 117ms remaining: 117ms 25: learn: 0.6907590 total: 122ms remaining: 112ms 26: learn: 0.6906700 total: 126ms remaining: 107ms 27: learn: 0.6905755 total: 130ms remaining: 102ms 28: learn: 0.6904881 total: 134ms remaining: 97.3ms 29: learn: 0.6904016 total: 139ms remaining: 92.4ms 30: learn: 0.6903190 total: 143ms remaining: 87.5ms 31: learn: 0.6902388 total: 147ms remaining: 82.6ms 32: learn: 0.6901481 total: 152ms remaining: 78.1ms 33: learn: 0.6900555 total: 156ms remaining: 73.4ms 34: learn: 0.6899723 total: 160ms remaining: 68.6ms 35: learn: 0.6898756 total: 164ms remaining: 63.9ms 36: learn: 0.6897852 total: 169ms remaining: 59.3ms 37: learn: 0.6896972 total: 173ms remaining: 54.6ms 38: learn: 0.6896087 total: 178ms remaining: 50.1ms 39: learn: 0.6895292 total: 182ms remaining: 45.5ms 40: learn: 0.6894447 total: 186ms remaining: 40.9ms 41: learn: 0.6893578 total: 191ms remaining: 36.3ms 42: learn: 0.6892653 total: 196ms remaining: 31.8ms 43: learn: 0.6891709 total: 200ms remaining: 27.2ms 44: learn: 0.6890794 total: 204ms remaining: 22.7ms 45: learn: 0.6889930 total: 208ms remaining: 18.1ms 46: learn: 0.6889089 total: 213ms remaining: 13.6ms 47: learn: 0.6888195 total: 217ms remaining: 9.04ms 48: learn: 0.6887380 total: 221ms remaining: 4.51ms 49: learn: 0.6886598 total: 225ms remaining: 0us 0: learn: 0.6930443 total: 4.79ms remaining: 235ms 1: learn: 0.6929517 total: 9.92ms remaining: 238ms 2: learn: 0.6928611 total: 14.7ms remaining: 231ms 3: learn: 0.6927705 total: 19.2ms remaining: 221ms 4: learn: 0.6926741 total: 24ms remaining: 216ms 5: learn: 0.6925844 total: 28.7ms remaining: 210ms 6: learn: 0.6924939 total: 33.2ms remaining: 204ms 7: learn: 0.6923966 total: 38.1ms remaining: 200ms 8: learn: 0.6923099 total: 42.7ms remaining: 195ms 9: learn: 0.6922241 total: 47.7ms remaining: 191ms 10: learn: 0.6921377 total: 52.4ms remaining: 186ms 11: learn: 0.6920474 total: 57ms remaining: 181ms 12: learn: 0.6919601 total: 61.8ms remaining: 176ms 13: learn: 0.6918781 total: 66.7ms remaining: 171ms 14: learn: 0.6917840 total: 71.5ms remaining: 167ms 15: learn: 0.6917008 total: 76.3ms remaining: 162ms 16: learn: 0.6916175 total: 80.9ms remaining: 157ms 17: learn: 0.6915288 total: 85.6ms remaining: 152ms 18: learn: 0.6914401 total: 90.2ms remaining: 147ms 19: learn: 0.6913508 total: 95ms remaining: 142ms 20: learn: 0.6912669 total: 99.3ms remaining: 137ms 21: learn: 0.6911794 total: 104ms remaining: 132ms 22: learn: 0.6910865 total: 108ms remaining: 126ms 23: learn: 0.6910039 total: 112ms remaining: 121ms 24: learn: 0.6909129 total: 116ms remaining: 116ms 25: learn: 0.6908243 total: 120ms remaining: 111ms 26: learn: 0.6907354 total: 125ms remaining: 106ms 27: learn: 0.6906436 total: 129ms remaining: 102ms 28: learn: 0.6905573 total: 134ms remaining: 96.7ms 29: learn: 0.6904758 total: 138ms remaining: 91.8ms 30: learn: 0.6903964 total: 142ms remaining: 87ms 31: learn: 0.6903145 total: 146ms remaining: 82.2ms 32: learn: 0.6902311 total: 150ms remaining: 77.5ms 33: learn: 0.6901474 total: 155ms remaining: 72.8ms 34: learn: 0.6900649 total: 159ms remaining: 68.1ms 35: learn: 0.6899740 total: 163ms remaining: 63.4ms 36: learn: 0.6898832 total: 167ms remaining: 58.8ms 37: learn: 0.6897969 total: 172ms remaining: 54.2ms 38: learn: 0.6897095 total: 176ms remaining: 49.6ms 39: learn: 0.6896326 total: 180ms remaining: 45ms 40: learn: 0.6895494 total: 184ms remaining: 40.4ms 41: learn: 0.6894634 total: 189ms remaining: 36ms 42: learn: 0.6893850 total: 194ms remaining: 31.5ms 43: learn: 0.6892923 total: 198ms remaining: 27ms 44: learn: 0.6892034 total: 202ms remaining: 22.5ms 45: learn: 0.6891230 total: 207ms remaining: 18ms 46: learn: 0.6890420 total: 211ms remaining: 13.4ms 47: learn: 0.6889531 total: 215ms remaining: 8.95ms 48: learn: 0.6888752 total: 219ms remaining: 4.48ms 49: learn: 0.6887966 total: 224ms remaining: 0us 0: learn: 0.6930547 total: 4.86ms remaining: 238ms 1: learn: 0.6929511 total: 9.93ms remaining: 238ms 2: learn: 0.6928648 total: 14.5ms remaining: 227ms 3: learn: 0.6927714 total: 19.3ms remaining: 222ms 4: learn: 0.6926688 total: 24.1ms remaining: 217ms 5: learn: 0.6925755 total: 28.9ms remaining: 212ms 6: learn: 0.6924858 total: 33.7ms remaining: 207ms 7: learn: 0.6923920 total: 38.6ms remaining: 203ms 8: learn: 0.6923009 total: 43.3ms remaining: 197ms 9: learn: 0.6922124 total: 47.8ms remaining: 191ms 10: learn: 0.6921237 total: 52.6ms remaining: 187ms 11: learn: 0.6920327 total: 57.1ms remaining: 181ms 12: learn: 0.6919424 total: 61.8ms remaining: 176ms 13: learn: 0.6918480 total: 66.7ms remaining: 171ms 14: learn: 0.6917546 total: 71.4ms remaining: 167ms 15: learn: 0.6916703 total: 76.4ms remaining: 162ms 16: learn: 0.6915859 total: 81.2ms remaining: 158ms 17: learn: 0.6914846 total: 85.8ms remaining: 152ms 18: learn: 0.6913889 total: 90.6ms remaining: 148ms 19: learn: 0.6912941 total: 95.5ms remaining: 143ms 20: learn: 0.6912021 total: 99.8ms remaining: 138ms 21: learn: 0.6911061 total: 104ms remaining: 133ms 22: learn: 0.6910148 total: 108ms remaining: 127ms 23: learn: 0.6909300 total: 113ms remaining: 122ms 24: learn: 0.6908405 total: 117ms remaining: 117ms 25: learn: 0.6907502 total: 121ms remaining: 112ms 26: learn: 0.6906612 total: 125ms remaining: 107ms 27: learn: 0.6905653 total: 129ms remaining: 102ms 28: learn: 0.6904712 total: 134ms remaining: 96.7ms 29: learn: 0.6903851 total: 138ms remaining: 91.8ms 30: learn: 0.6903019 total: 142ms remaining: 86.9ms 31: learn: 0.6902164 total: 146ms remaining: 82.2ms 32: learn: 0.6901178 total: 150ms remaining: 77.5ms 33: learn: 0.6900208 total: 155ms remaining: 72.9ms 34: learn: 0.6899377 total: 159ms remaining: 68.2ms 35: learn: 0.6898412 total: 163ms remaining: 63.6ms 36: learn: 0.6897517 total: 168ms remaining: 58.9ms 37: learn: 0.6896630 total: 172ms remaining: 54.3ms 38: learn: 0.6895754 total: 177ms remaining: 49.8ms 39: learn: 0.6894956 total: 181ms remaining: 45.2ms 40: learn: 0.6894109 total: 185ms remaining: 40.7ms 41: learn: 0.6893229 total: 190ms remaining: 36.2ms 42: learn: 0.6892433 total: 194ms remaining: 31.7ms 43: learn: 0.6891448 total: 199ms remaining: 27.1ms 44: learn: 0.6890543 total: 203ms remaining: 22.5ms 45: learn: 0.6889670 total: 207ms remaining: 18ms 46: learn: 0.6888831 total: 211ms remaining: 13.5ms 47: learn: 0.6887937 total: 216ms remaining: 8.99ms 48: learn: 0.6887110 total: 220ms remaining: 4.49ms 49: learn: 0.6886262 total: 224ms remaining: 0us 0: learn: 0.6930443 total: 4.9ms remaining: 240ms 1: learn: 0.6929525 total: 10.2ms remaining: 245ms 2: learn: 0.6928646 total: 14.7ms remaining: 231ms 3: learn: 0.6927832 total: 19.2ms remaining: 221ms 4: learn: 0.6926894 total: 23.8ms remaining: 215ms 5: learn: 0.6925963 total: 28.5ms remaining: 209ms 6: learn: 0.6925016 total: 33.3ms remaining: 205ms 7: learn: 0.6924088 total: 38.1ms remaining: 200ms 8: learn: 0.6923209 total: 42.9ms remaining: 196ms 9: learn: 0.6922313 total: 47.5ms remaining: 190ms 10: learn: 0.6921447 total: 52.4ms remaining: 186ms 11: learn: 0.6920557 total: 57.2ms remaining: 181ms 12: learn: 0.6919649 total: 61.8ms remaining: 176ms 13: learn: 0.6918775 total: 66.5ms remaining: 171ms 14: learn: 0.6917820 total: 71.3ms remaining: 166ms 15: learn: 0.6916973 total: 76ms remaining: 162ms 16: learn: 0.6916133 total: 80.7ms remaining: 157ms 17: learn: 0.6915232 total: 85.4ms remaining: 152ms 18: learn: 0.6914290 total: 90.3ms remaining: 147ms 19: learn: 0.6913346 total: 94.9ms remaining: 142ms 20: learn: 0.6912512 total: 99.6ms remaining: 137ms 21: learn: 0.6911580 total: 104ms remaining: 133ms 22: learn: 0.6910653 total: 109ms remaining: 128ms 23: learn: 0.6909720 total: 113ms remaining: 122ms 24: learn: 0.6908842 total: 117ms remaining: 117ms 25: learn: 0.6907942 total: 122ms remaining: 113ms 26: learn: 0.6907049 total: 126ms remaining: 108ms 27: learn: 0.6906125 total: 130ms remaining: 103ms 28: learn: 0.6905176 total: 135ms remaining: 97.5ms 29: learn: 0.6904307 total: 139ms remaining: 92.5ms 30: learn: 0.6903474 total: 143ms remaining: 87.6ms 31: learn: 0.6902662 total: 147ms remaining: 82.7ms 32: learn: 0.6901761 total: 151ms remaining: 77.9ms 33: learn: 0.6900904 total: 155ms remaining: 73.1ms 34: learn: 0.6900050 total: 160ms remaining: 68.4ms 35: learn: 0.6899153 total: 164ms remaining: 63.7ms 36: learn: 0.6898247 total: 168ms remaining: 59ms 37: learn: 0.6897368 total: 172ms remaining: 54.4ms 38: learn: 0.6896486 total: 176ms remaining: 49.8ms 39: learn: 0.6895688 total: 181ms remaining: 45.3ms 40: learn: 0.6894828 total: 186ms remaining: 40.8ms 41: learn: 0.6893941 total: 190ms remaining: 36.3ms 42: learn: 0.6893062 total: 195ms remaining: 31.8ms 43: learn: 0.6892155 total: 200ms remaining: 27.2ms 44: learn: 0.6891235 total: 204ms remaining: 22.7ms 45: learn: 0.6890369 total: 208ms remaining: 18.1ms 46: learn: 0.6889546 total: 212ms remaining: 13.5ms 47: learn: 0.6888630 total: 217ms remaining: 9.03ms 48: learn: 0.6887806 total: 221ms remaining: 4.5ms 49: learn: 0.6887016 total: 225ms remaining: 0us 0: learn: 0.6930467 total: 4.82ms remaining: 236ms 1: learn: 0.6929545 total: 9.77ms remaining: 235ms 2: learn: 0.6928628 total: 14.5ms remaining: 227ms 3: learn: 0.6927671 total: 19.7ms remaining: 226ms 4: learn: 0.6926661 total: 24.3ms remaining: 219ms 5: learn: 0.6925708 total: 29ms remaining: 213ms 6: learn: 0.6924787 total: 33.8ms remaining: 208ms 7: learn: 0.6923846 total: 38.5ms remaining: 202ms 8: learn: 0.6922955 total: 43.3ms remaining: 197ms 9: learn: 0.6922050 total: 48ms remaining: 192ms 10: learn: 0.6921060 total: 52.6ms remaining: 186ms 11: learn: 0.6920166 total: 57.2ms remaining: 181ms 12: learn: 0.6919264 total: 62.1ms remaining: 177ms 13: learn: 0.6918286 total: 67.2ms remaining: 173ms 14: learn: 0.6917336 total: 72.1ms remaining: 168ms 15: learn: 0.6916508 total: 77.2ms remaining: 164ms 16: learn: 0.6915645 total: 81.8ms remaining: 159ms 17: learn: 0.6914664 total: 86.5ms remaining: 154ms 18: learn: 0.6913711 total: 91.4ms remaining: 149ms 19: learn: 0.6912756 total: 96.1ms remaining: 144ms 20: learn: 0.6911914 total: 101ms remaining: 139ms 21: learn: 0.6910939 total: 105ms remaining: 134ms 22: learn: 0.6910013 total: 109ms remaining: 128ms 23: learn: 0.6909191 total: 113ms remaining: 123ms 24: learn: 0.6908253 total: 118ms remaining: 118ms 25: learn: 0.6907243 total: 122ms remaining: 112ms 26: learn: 0.6906345 total: 126ms remaining: 107ms 27: learn: 0.6905432 total: 130ms remaining: 103ms 28: learn: 0.6904536 total: 135ms remaining: 97.6ms 29: learn: 0.6903656 total: 139ms remaining: 92.7ms 30: learn: 0.6902808 total: 143ms remaining: 87.8ms 31: learn: 0.6902003 total: 147ms remaining: 82.9ms 32: learn: 0.6901094 total: 152ms remaining: 78.2ms 33: learn: 0.6900125 total: 156ms remaining: 73.4ms 34: learn: 0.6899287 total: 160ms remaining: 68.6ms 35: learn: 0.6898332 total: 164ms remaining: 63.9ms 36: learn: 0.6897414 total: 169ms remaining: 59.3ms 37: learn: 0.6896508 total: 173ms remaining: 54.6ms 38: learn: 0.6895647 total: 177ms remaining: 50ms 39: learn: 0.6894845 total: 182ms remaining: 45.4ms 40: learn: 0.6893986 total: 186ms remaining: 40.9ms 41: learn: 0.6893081 total: 191ms remaining: 36.4ms 42: learn: 0.6892175 total: 196ms remaining: 31.8ms 43: learn: 0.6891239 total: 200ms remaining: 27.3ms 44: learn: 0.6890373 total: 204ms remaining: 22.7ms 45: learn: 0.6889475 total: 209ms remaining: 18.1ms 46: learn: 0.6888619 total: 213ms remaining: 13.6ms 47: learn: 0.6887718 total: 217ms remaining: 9.04ms 48: learn: 0.6886872 total: 221ms remaining: 4.52ms 49: learn: 0.6886024 total: 226ms remaining: 0us 0: learn: 0.6930468 total: 5.06ms remaining: 248ms 1: learn: 0.6929566 total: 10.1ms remaining: 242ms 2: learn: 0.6928693 total: 14.8ms remaining: 232ms 3: learn: 0.6927756 total: 19.8ms remaining: 228ms 4: learn: 0.6926774 total: 24.5ms remaining: 221ms 5: learn: 0.6925827 total: 29.2ms remaining: 214ms 6: learn: 0.6924925 total: 34.1ms remaining: 210ms 7: learn: 0.6923999 total: 39.2ms remaining: 206ms 8: learn: 0.6923097 total: 43.9ms remaining: 200ms 9: learn: 0.6922205 total: 48.6ms remaining: 195ms 10: learn: 0.6921302 total: 53.4ms remaining: 189ms 11: learn: 0.6920439 total: 57.8ms remaining: 183ms 12: learn: 0.6919558 total: 62.7ms remaining: 179ms 13: learn: 0.6918583 total: 67.4ms remaining: 173ms 14: learn: 0.6917702 total: 72.3ms remaining: 169ms 15: learn: 0.6916895 total: 76.9ms remaining: 163ms 16: learn: 0.6916063 total: 81.7ms remaining: 159ms 17: learn: 0.6915143 total: 86.3ms remaining: 153ms 18: learn: 0.6914255 total: 91ms remaining: 149ms 19: learn: 0.6913322 total: 95.4ms remaining: 143ms 20: learn: 0.6912455 total: 99.7ms remaining: 138ms 21: learn: 0.6911560 total: 104ms remaining: 133ms 22: learn: 0.6910628 total: 108ms remaining: 127ms 23: learn: 0.6909714 total: 113ms remaining: 122ms 24: learn: 0.6908861 total: 117ms remaining: 117ms 25: learn: 0.6907985 total: 121ms remaining: 112ms 26: learn: 0.6907115 total: 125ms remaining: 107ms 27: learn: 0.6906238 total: 129ms remaining: 102ms 28: learn: 0.6905357 total: 134ms remaining: 96.8ms 29: learn: 0.6904495 total: 138ms remaining: 91.8ms 30: learn: 0.6903678 total: 142ms remaining: 87ms 31: learn: 0.6902830 total: 146ms remaining: 82.3ms 32: learn: 0.6901897 total: 151ms remaining: 77.6ms 33: learn: 0.6900952 total: 155ms remaining: 72.8ms 34: learn: 0.6900130 total: 159ms remaining: 68.2ms 35: learn: 0.6899167 total: 163ms remaining: 63.5ms 36: learn: 0.6898293 total: 168ms remaining: 59ms 37: learn: 0.6897367 total: 172ms remaining: 54.3ms 38: learn: 0.6896500 total: 177ms remaining: 49.8ms 39: learn: 0.6895699 total: 181ms remaining: 45.3ms 40: learn: 0.6894869 total: 186ms remaining: 40.7ms 41: learn: 0.6893990 total: 190ms remaining: 36.2ms 42: learn: 0.6893083 total: 194ms remaining: 31.6ms 43: learn: 0.6892141 total: 199ms remaining: 27.1ms 44: learn: 0.6891300 total: 203ms remaining: 22.6ms 45: learn: 0.6890425 total: 207ms remaining: 18ms 46: learn: 0.6889607 total: 211ms remaining: 13.5ms 47: learn: 0.6888711 total: 216ms remaining: 8.99ms 48: learn: 0.6887903 total: 220ms remaining: 4.49ms 49: learn: 0.6887130 total: 224ms remaining: 0us 0: learn: 0.6930531 total: 4.85ms remaining: 238ms 1: learn: 0.6929694 total: 9.7ms remaining: 233ms 2: learn: 0.6928776 total: 14.4ms remaining: 226ms 3: learn: 0.6927957 total: 19.1ms remaining: 220ms 4: learn: 0.6927006 total: 23.9ms remaining: 215ms 5: learn: 0.6926130 total: 28.6ms remaining: 209ms 6: learn: 0.6925219 total: 33.2ms remaining: 204ms 7: learn: 0.6924288 total: 37.9ms remaining: 199ms 8: learn: 0.6923451 total: 42.7ms remaining: 194ms 9: learn: 0.6922595 total: 47.2ms remaining: 189ms 10: learn: 0.6921746 total: 51.7ms remaining: 183ms 11: learn: 0.6920875 total: 56.2ms remaining: 178ms 12: learn: 0.6920013 total: 61.1ms remaining: 174ms 13: learn: 0.6919094 total: 66ms remaining: 170ms 14: learn: 0.6918202 total: 70.9ms remaining: 166ms 15: learn: 0.6917415 total: 75.6ms remaining: 161ms 16: learn: 0.6916506 total: 80.5ms remaining: 156ms 17: learn: 0.6915620 total: 85.1ms remaining: 151ms 18: learn: 0.6914727 total: 90ms remaining: 147ms 19: learn: 0.6913805 total: 94.5ms remaining: 142ms 20: learn: 0.6912945 total: 98.9ms remaining: 137ms 21: learn: 0.6912087 total: 103ms remaining: 131ms 22: learn: 0.6911250 total: 108ms remaining: 126ms 23: learn: 0.6910457 total: 112ms remaining: 121ms 24: learn: 0.6909565 total: 116ms remaining: 116ms 25: learn: 0.6908693 total: 121ms remaining: 111ms 26: learn: 0.6907829 total: 125ms remaining: 107ms 27: learn: 0.6906995 total: 129ms remaining: 102ms 28: learn: 0.6906078 total: 134ms remaining: 96.7ms 29: learn: 0.6905218 total: 138ms remaining: 91.9ms 30: learn: 0.6904399 total: 142ms remaining: 87ms 31: learn: 0.6903609 total: 146ms remaining: 82.2ms 32: learn: 0.6902704 total: 150ms remaining: 77.5ms 33: learn: 0.6901865 total: 155ms remaining: 72.8ms 34: learn: 0.6901045 total: 159ms remaining: 68.1ms 35: learn: 0.6900085 total: 163ms remaining: 63.4ms 36: learn: 0.6899207 total: 167ms remaining: 58.7ms 37: learn: 0.6898266 total: 171ms remaining: 54.1ms 38: learn: 0.6897435 total: 175ms remaining: 49.5ms 39: learn: 0.6896632 total: 179ms remaining: 44.9ms 40: learn: 0.6895810 total: 184ms remaining: 40.3ms 41: learn: 0.6894940 total: 189ms remaining: 35.9ms 42: learn: 0.6894040 total: 193ms remaining: 31.5ms 43: learn: 0.6893162 total: 198ms remaining: 27ms 44: learn: 0.6892266 total: 203ms remaining: 22.5ms 45: learn: 0.6891416 total: 207ms remaining: 18ms 46: learn: 0.6890588 total: 211ms remaining: 13.5ms 47: learn: 0.6889707 total: 215ms remaining: 8.97ms 48: learn: 0.6888846 total: 220ms remaining: 4.48ms 49: learn: 0.6888042 total: 224ms remaining: 0us 0: learn: 0.6841112 total: 5.07ms remaining: 249ms 1: learn: 0.6769132 total: 10.6ms remaining: 254ms 2: learn: 0.6699189 total: 15.4ms remaining: 241ms 3: learn: 0.6644464 total: 20.3ms remaining: 233ms 4: learn: 0.6590241 total: 25ms remaining: 225ms 5: learn: 0.6546299 total: 29.9ms remaining: 219ms 6: learn: 0.6510139 total: 34.9ms remaining: 214ms 7: learn: 0.6476633 total: 39.8ms remaining: 209ms 8: learn: 0.6443936 total: 44.9ms remaining: 204ms 9: learn: 0.6417391 total: 49.4ms remaining: 197ms 10: learn: 0.6390964 total: 54.2ms remaining: 192ms 11: learn: 0.6364200 total: 59.1ms remaining: 187ms 12: learn: 0.6348021 total: 63.8ms remaining: 182ms 13: learn: 0.6326353 total: 68.9ms remaining: 177ms 14: learn: 0.6306371 total: 74.1ms remaining: 173ms 15: learn: 0.6290240 total: 79ms remaining: 168ms 16: learn: 0.6262280 total: 83.9ms remaining: 163ms 17: learn: 0.6246000 total: 88.7ms remaining: 158ms 18: learn: 0.6225401 total: 93.5ms remaining: 153ms 19: learn: 0.6212762 total: 97.9ms remaining: 147ms 20: learn: 0.6193386 total: 103ms remaining: 142ms 21: learn: 0.6175938 total: 107ms remaining: 136ms 22: learn: 0.6157911 total: 111ms remaining: 131ms 23: learn: 0.6143848 total: 116ms remaining: 126ms 24: learn: 0.6126024 total: 120ms remaining: 120ms 25: learn: 0.6108769 total: 125ms remaining: 115ms 26: learn: 0.6087401 total: 129ms remaining: 110ms 27: learn: 0.6072125 total: 134ms remaining: 105ms 28: learn: 0.6056880 total: 138ms remaining: 100ms 29: learn: 0.6047054 total: 143ms remaining: 95.2ms 30: learn: 0.6035148 total: 147ms remaining: 90.1ms 31: learn: 0.6018584 total: 151ms remaining: 85.1ms 32: learn: 0.6010133 total: 156ms remaining: 80.2ms 33: learn: 0.5991874 total: 160ms remaining: 75.4ms 34: learn: 0.5982674 total: 165ms remaining: 70.6ms 35: learn: 0.5971569 total: 169ms remaining: 65.8ms 36: learn: 0.5963368 total: 173ms remaining: 61ms 37: learn: 0.5952957 total: 178ms remaining: 56.1ms 38: learn: 0.5945640 total: 182ms remaining: 51.4ms 39: learn: 0.5934949 total: 187ms remaining: 46.7ms 40: learn: 0.5921036 total: 192ms remaining: 42.1ms 41: learn: 0.5915830 total: 196ms remaining: 37.4ms 42: learn: 0.5907880 total: 201ms remaining: 32.7ms 43: learn: 0.5898932 total: 206ms remaining: 28.1ms 44: learn: 0.5887470 total: 211ms remaining: 23.4ms 45: learn: 0.5873883 total: 215ms remaining: 18.7ms 46: learn: 0.5864025 total: 219ms remaining: 14ms 47: learn: 0.5847452 total: 224ms remaining: 9.33ms 48: learn: 0.5839259 total: 228ms remaining: 4.66ms 49: learn: 0.5827812 total: 233ms remaining: 0us
GridSearchCV(cv=10,
estimator=<catboost.core.CatBoostClassifier object at 0x0000018A6C9E8040>,
param_grid=[{'learning_rate': [0.1, 0.01, 0.001],
'n_estimators': [10, 25, 50]}],
scoring='accuracy')
model_cat = CatBoostClassifier(**model_cat.best_params_)
model_cat.fit(X_train,Y_train)
pred_ = model_cat.predict(X_test)
acc = accuracy_score(Y_test,pred_)
prec = precision_score(Y_test,pred_,average="weighted")
rec = recall_score(Y_test,pred_,average="weighted")
print("Accuracy :: ",acc)
print("Precision :: ",prec)
print("Recall :: ",rec)
results.loc["CatBoostClassifier"] = [acc,prec,rec]
0: learn: 0.6841112 total: 4.93ms remaining: 242ms 1: learn: 0.6769132 total: 10.1ms remaining: 241ms 2: learn: 0.6699189 total: 14.7ms remaining: 231ms 3: learn: 0.6644464 total: 19.7ms remaining: 226ms 4: learn: 0.6590241 total: 24.7ms remaining: 223ms 5: learn: 0.6546299 total: 29.6ms remaining: 217ms 6: learn: 0.6510139 total: 34.5ms remaining: 212ms 7: learn: 0.6476633 total: 39.6ms remaining: 208ms 8: learn: 0.6443936 total: 44.6ms remaining: 203ms 9: learn: 0.6417391 total: 49.1ms remaining: 196ms 10: learn: 0.6390964 total: 54.1ms remaining: 192ms 11: learn: 0.6364200 total: 59.3ms remaining: 188ms 12: learn: 0.6348021 total: 64ms remaining: 182ms 13: learn: 0.6326353 total: 69.7ms remaining: 179ms 14: learn: 0.6306371 total: 74.9ms remaining: 175ms 15: learn: 0.6290240 total: 80.1ms remaining: 170ms 16: learn: 0.6262280 total: 84.8ms remaining: 165ms 17: learn: 0.6246000 total: 89.5ms remaining: 159ms 18: learn: 0.6225401 total: 94.5ms remaining: 154ms 19: learn: 0.6212762 total: 99.2ms remaining: 149ms 20: learn: 0.6193386 total: 104ms remaining: 144ms 21: learn: 0.6175938 total: 109ms remaining: 138ms 22: learn: 0.6157911 total: 113ms remaining: 133ms 23: learn: 0.6143848 total: 117ms remaining: 127ms 24: learn: 0.6126024 total: 122ms remaining: 122ms 25: learn: 0.6108769 total: 126ms remaining: 116ms 26: learn: 0.6087401 total: 131ms remaining: 111ms 27: learn: 0.6072125 total: 135ms remaining: 106ms 28: learn: 0.6056880 total: 139ms remaining: 101ms 29: learn: 0.6047054 total: 144ms remaining: 95.8ms 30: learn: 0.6035148 total: 148ms remaining: 90.7ms 31: learn: 0.6018584 total: 152ms remaining: 85.6ms 32: learn: 0.6010133 total: 156ms remaining: 80.6ms 33: learn: 0.5991874 total: 161ms remaining: 75.7ms 34: learn: 0.5982674 total: 165ms remaining: 70.8ms 35: learn: 0.5971569 total: 170ms remaining: 66ms 36: learn: 0.5963368 total: 174ms remaining: 61.2ms 37: learn: 0.5952957 total: 179ms remaining: 56.6ms 38: learn: 0.5945640 total: 184ms remaining: 51.8ms 39: learn: 0.5934949 total: 189ms remaining: 47.1ms 40: learn: 0.5921036 total: 194ms remaining: 42.5ms 41: learn: 0.5915830 total: 198ms remaining: 37.8ms 42: learn: 0.5907880 total: 203ms remaining: 33.1ms 43: learn: 0.5898932 total: 208ms remaining: 28.3ms 44: learn: 0.5887470 total: 212ms remaining: 23.5ms 45: learn: 0.5873883 total: 216ms remaining: 18.8ms 46: learn: 0.5864025 total: 221ms remaining: 14.1ms 47: learn: 0.5847452 total: 225ms remaining: 9.38ms 48: learn: 0.5839259 total: 230ms remaining: 4.68ms 49: learn: 0.5827812 total: 234ms remaining: 0us Accuracy :: 0.7178021757657793 Precision :: 0.7178051950064999 Recall :: 0.7178021757657793
results = results*100
results
| Accuracy | Precison | Recall | |
|---|---|---|---|
| LogisticRegression | 61.43781 | 63.221711 | 61.43781 |
| DecisionTreeClassifier | 63.242593 | 64.650716 | 63.242593 |
| RandomForestClassifier | 91.387176 | 91.394967 | 91.387176 |
| XGBClassifier | 91.296937 | 91.303619 | 91.296937 |
| CatBoostClassifier | 71.780218 | 71.78052 | 71.780218 |
px.bar(results,y ="Accuracy",x = results.index,color = results.index,title="Accuracy Comparison")
px.bar(results,y ="Precison",x = results.index,color = results.index,title="Precision Comparison")
px.bar(results,y ="Recall",x = results.index,color = results.index,title="Recall Comparison")
# Getting confusion matrix
pred_ = model_forest.predict(X_test)
conf_matrix = confusion_matrix(Y_test,pred_)
plt.figure(figsize=(10,8))
plt.title("HeatMap of Random Forest")
sns.heatmap(conf_matrix, annot=True,fmt='.10g')
plt.show()
plt.figure(figsize=(10,8))
# Compute fpr, tpr, thresholds and roc auc
fpr, tpr, thresholds = roc_curve(Y_test, pred_)
roc_auc = auc(fpr, tpr)
# Plot ROC curve
plt.plot(fpr, tpr, label='ROC curve (area = %0.3f)' % roc_auc)
plt.plot([0, 1], [0, 1], 'k--') # random predictions curve
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.0])
plt.xlabel('False Positive Rate or (1 - Specifity)')
plt.ylabel('True Positive Rate or (Sensitivity)')
plt.title('Receiver Operating Characteristic')
plt.legend(loc="lower right")
plt.show()